Dataset schema (field, dtype, min to max length / number of classes):

commit          stringlengths   40 to 40
old_file        stringlengths   4 to 150
new_file        stringlengths   4 to 150
old_contents    stringlengths   0 to 3.26k
new_contents    stringlengths   1 to 4.43k
subject         stringlengths   15 to 501
message         stringlengths   15 to 4.06k
lang            stringclasses   4 values
license         stringclasses   13 values
repos           stringlengths   5 to 91.5k
diff            stringlengths   0 to 4.35k
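Each record below follows this schema, one field per line in the order given above. As a minimal sketch of how such a dump could be consumed (assuming it is published as a Hugging Face-style dataset; the dataset path used here is a placeholder, not the real one), the rows could be loaded and filtered along these lines:

# Minimal sketch: load a commit dataset with the schema above and inspect a few rows.
# "example-org/commit-diffs" is a placeholder path, not the actual dataset name.
from datasets import load_dataset

ds = load_dataset("example-org/commit-diffs", split="train")

# Keep Python commits that carry a non-empty diff.
python_rows = ds.filter(lambda row: row["lang"] == "Python" and len(row["diff"]) > 0)

for row in python_rows.select(range(3)):
    print(row["commit"], "-", row["subject"])
    print(row["diff"][:200])  # first 200 characters of the unified diff

The records themselves are reproduced verbatim; fields such as old_contents, new_contents, and diff contain the raw file and patch text of each commit.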
a99150d5c98074bde9218d6feb68f4cb200a0e4c
q_and_a/urls.py
q_and_a/urls.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.conf.urls import patterns, include, url from django.conf.urls.static import static from django.views.generic import TemplateView # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^organisations', include('organisations.urls')), url(r'^$', TemplateView.as_view(template_name="home.html"), name='home'), # url(r'^', include('prototype.urls')), ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.conf.urls import patterns, include, url from django.conf.urls.static import static from django.views.generic import TemplateView # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^', include('prototype.urls')), ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Use URLs from the prototype app
Use URLs from the prototype app
Python
bsd-3-clause
DemocracyClub/candidate_questions,DemocracyClub/candidate_questions,DemocracyClub/candidate_questions
--- +++ @@ -13,7 +13,5 @@ urlpatterns = patterns('', # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), - url(r'^organisations', include('organisations.urls')), - url(r'^$', TemplateView.as_view(template_name="home.html"), name='home'), - # url(r'^', include('prototype.urls')), + url(r'^', include('prototype.urls')), ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
580868c63f61bb7f6576dc7b0029aa137e274a51
qnd/__init__.py
qnd/__init__.py
"""Quick and Distributed TensorFlow command framework""" from .flag import * from .run import def_run
"""Quick and Distributed TensorFlow command framework""" from .flag import * from .run import def_run __all__ = ["add_flag", "add_required_flag", "FlagAdder", "def_run"]
Add __all__ variable to top level package
Add __all__ variable to top level package
Python
unlicense
raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd
--- +++ @@ -2,3 +2,5 @@ from .flag import * from .run import def_run + +__all__ = ["add_flag", "add_required_flag", "FlagAdder", "def_run"]
697120d4e693bf7fbc192164b5df3dfb30f71a3f
tests/__init__.py
tests/__init__.py
import logging import unittest import os from sqlalchemy import create_engine from rtrss import config, database logging.disable(logging.ERROR) engine = create_engine(config.SQLALCHEMY_DATABASE_URI, echo=False, client_encoding='utf8') # Reconfigure session factory to use our test schema database.Session.configure(bind=engine) class AttrDict(dict): """Class to make mock objects""" def __init__(self, *args, **kwargs): super(AttrDict, self).__init__(*args, **kwargs) self.__dict__ = self class RTRSSTestCase(unittest.TestCase): @classmethod def setUpClass(cls): if os.path.isdir(config.DATA_DIR): os.rmdir(config.DATA_DIR) os.makedirs(config.DATA_DIR) @classmethod def tearDownClass(cls): os.rmdir(config.DATA_DIR) class RTRSSDataBaseTestCase(RTRSSTestCase): def setUp(self): database.clear(engine) database.init(engine) self.db = database.Session() def tearDown(self): database.clear(engine) self.db.close()
import logging import unittest import os import shutil from sqlalchemy import create_engine from rtrss import config, database logging.disable(logging.ERROR) engine = create_engine(config.SQLALCHEMY_DATABASE_URI, echo=False, client_encoding='utf8') # Reconfigure session factory to use our test schema database.Session.configure(bind=engine) class AttrDict(dict): """Class to make mock objects""" def __init__(self, *args, **kwargs): super(AttrDict, self).__init__(*args, **kwargs) self.__dict__ = self class RTRSSTestCase(unittest.TestCase): @classmethod def setUpClass(cls): if os.path.isdir(config.DATA_DIR): os.rmdir(config.DATA_DIR) os.makedirs(config.DATA_DIR) @classmethod def tearDownClass(cls): shutil.rmtree(config.DATA_DIR) class RTRSSDataBaseTestCase(RTRSSTestCase): def setUp(self): database.clear(engine) database.init(engine) self.db = database.Session() def tearDown(self): database.clear(engine) self.db.close()
Remove test data folder with contents
Remove test data folder with contents
Python
apache-2.0
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
--- +++ @@ -1,6 +1,7 @@ import logging import unittest import os +import shutil from sqlalchemy import create_engine @@ -32,7 +33,7 @@ @classmethod def tearDownClass(cls): - os.rmdir(config.DATA_DIR) + shutil.rmtree(config.DATA_DIR) class RTRSSDataBaseTestCase(RTRSSTestCase):
2673f1bac21e43a4cad9edb7352f89750d6d0144
tests/settings.py
tests/settings.py
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. """ Settings for tests. """ from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage"
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. """ Settings for tests. """ from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" ALLOW_ANONYMOUS_ACCESS = False
Enforce that tests run with anonymous access off.
Enforce that tests run with anonymous access off.
Python
bsd-2-clause
mozilla/moztrap,mccarrmb/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,mozilla/moztrap,shinglyu/moztrap,mccarrmb/moztrap,shinglyu/moztrap,shinglyu/moztrap,mccarrmb/moztrap,mozilla/moztrap,bobsilverberg/moztrap,mozilla/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,bobsilverberg/moztrap
--- +++ @@ -22,3 +22,4 @@ from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" +ALLOW_ANONYMOUS_ACCESS = False
15c8215415d36da4fac9c7333e62239f7b81c12d
test/support/mock_definitions.py
test/support/mock_definitions.py
# Generates validation/input definitions as if they were created by splunk for tests class MockDefinitions(object): def __init__(self, session_key=None): self.session_key = session_key if session_key is not None else '123456789' @property def metadata(self): host = os.getenv('SPLUNK_API_HOST', 'localhost') port = os.getenv('SPLUNK_API_PORT', 8089), return {'server_uri': 'https://{host}:{port}/', 'session_key': self.session_key, 'name': 'amp4e_events_test_input'}
import os # Generates validation/input definitions as if they were created by splunk for tests class MockDefinitions(object): def __init__(self, session_key=None): self.session_key = session_key if session_key is not None else '123456789' @property def metadata(self): host = os.getenv('SPLUNK_API_HOST', '127.0.0.1') return {'server_uri': 'https://{host}:8089/'.format(host=host), 'session_key': self.session_key, 'name': 'amp4e_events_test_input'}
Change mock to be env dependant
Change mock to be env dependant
Python
bsd-2-clause
Cisco-AMP/amp4e_splunk_events_input,Cisco-AMP/amp4e_splunk_events_input,Cisco-AMP/amp4e_splunk_events_input,Cisco-AMP/amp4e_splunk_events_input
--- +++ @@ -1,3 +1,4 @@ +import os # Generates validation/input definitions as if they were created by splunk for tests class MockDefinitions(object): def __init__(self, session_key=None): @@ -5,7 +6,6 @@ @property def metadata(self): - host = os.getenv('SPLUNK_API_HOST', 'localhost') - port = os.getenv('SPLUNK_API_PORT', 8089), - return {'server_uri': 'https://{host}:{port}/', 'session_key': self.session_key, + host = os.getenv('SPLUNK_API_HOST', '127.0.0.1') + return {'server_uri': 'https://{host}:8089/'.format(host=host), 'session_key': self.session_key, 'name': 'amp4e_events_test_input'}
2e3d133874e1df647df146ce90e0f4e2ccf84ef4
todo-list/todo.py
todo-list/todo.py
#!/usr/bin/env python # My solution to the following challenge: https://redd.it/39ws1x from datetime import date from collections import defaultdict class Todo: def __init__(self): self.items = defaultdict(list) def add_item(self, item, tag): self.items[tag].append(item) def remove_item(self, item, tag): self.items[tag].remove(item) def print_all_items(self): for (tag, items) in self.items.items(): print str(tag) + ':' for item in items: print ' ' + item print '' if __name__ == '__main__': todo = Todo() # I actually need to do this, for real. todo.add_item('Get an oil change.', 'Car') todo.add_item('Plastidip my wheels.', 'Car') todo.add_item('Clean my room.', 'Housework') todo.print_all_items() todo.remove_item('Get an oil change.', 'Car') todo.print_all_items()
#!/usr/bin/env python # My solution to the following challenge: https://redd.it/39ws1x import os from datetime import date from collections import defaultdict home = os.path.expanduser('~') class Todo: def __init__(self): self.items = defaultdict(list) def __load_items(self): try: with open(home + '/.config/todo/list', 'r') as todo: for item in todo.readline(): # TODO: Need to figure out a way to store tags and items. pass except IOError: print 'You do not have any items to load.' def __save_items(self): try: with open(home + '/.config/todo/list', 'w') as todo: # TODO: Implement saving items. pass def add_item(self, item, tag): self.items[tag].append(item) def remove_item(self, item, tag): if tag in self.items: if item in self.items[tag]: self.items[tag].remove(item) else: print "Item %s could not be found." % item else: print 'There is not tag named %s' % tag def print_all_items(self): for (tag, items) in self.items.items(): print str(tag) + ':' for item in items: print ' ' + item print '' if __name__ == '__main__': todo = Todo() # I actually need to do this, for real. todo.add_item('Get an oil change.', 'Car') todo.add_item('Plastidip my wheels.', 'Car') todo.add_item('Clean my room.', 'Housework') todo.print_all_items() todo.remove_item('Get an oil change.', 'Car') todo.remove_item('x', 'x') todo.remove_item('x', 'Housework') todo.print_all_items()
Add and remove now work with minimal error checking.
Add and remove now work with minimal error checking.
Python
mit
Kredns/python
--- +++ @@ -1,18 +1,42 @@ #!/usr/bin/env python # My solution to the following challenge: https://redd.it/39ws1x +import os from datetime import date from collections import defaultdict + +home = os.path.expanduser('~') class Todo: def __init__(self): self.items = defaultdict(list) + def __load_items(self): + try: + with open(home + '/.config/todo/list', 'r') as todo: + for item in todo.readline(): + # TODO: Need to figure out a way to store tags and items. + pass + except IOError: + print 'You do not have any items to load.' + + def __save_items(self): + try: + with open(home + '/.config/todo/list', 'w') as todo: + # TODO: Implement saving items. + pass + def add_item(self, item, tag): self.items[tag].append(item) def remove_item(self, item, tag): - self.items[tag].remove(item) + if tag in self.items: + if item in self.items[tag]: + self.items[tag].remove(item) + else: + print "Item %s could not be found." % item + else: + print 'There is not tag named %s' % tag def print_all_items(self): for (tag, items) in self.items.items(): @@ -29,4 +53,6 @@ todo.add_item('Clean my room.', 'Housework') todo.print_all_items() todo.remove_item('Get an oil change.', 'Car') + todo.remove_item('x', 'x') + todo.remove_item('x', 'Housework') todo.print_all_items()
5c3b93649a32e4a3007a428473af5235a40ac92e
__openerp__.py
__openerp__.py
# -*- coding: utf-8 -*- ############################################################################## # ############################################################################## { 'name': u"Asset Streamline", 'version': u"1.0", 'author': u"XCG Consulting", 'category': u"Custom Module", 'description': u"""Includes several integrity fixes and optimizations over the standard module. """, 'website': u"", 'depends': [ 'base', 'account_streamline', 'analytic_structure' 'account_asset', 'oemetasl', ], 'data': [ 'data/asset_sequence.xml', 'security/ir.model.access.csv', 'wizard/account_asset_close_view.xml', 'wizard/account_asset_suspend_view.xml', 'wizard/account_asset_change_values_view.xml', 'wizard/account_asset_depreciation_wizard.xml', 'wizard/account_asset_change_duration_view.xml', 'views/account_asset_view.xml', ], 'demo': [ 'demo/account_asset_demo.xml' ], 'css': [ 'static/src/css/account_asset_streamline.css' ], 'test': [], 'installable': True, 'active': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- ############################################################################## # ############################################################################## { 'name': u"Asset Streamline", 'version': u"1.2.1", 'author': u"XCG Consulting", 'category': u"Custom Module", 'description': u"""Includes several integrity fixes and optimizations over the standard module. """, 'website': u"", 'depends': [ 'base', 'account_streamline', 'analytic_structure' 'account_asset', 'oemetasl', ], 'data': [ 'data/asset_sequence.xml', 'security/ir.model.access.csv', 'wizard/account_asset_close_view.xml', 'wizard/account_asset_suspend_view.xml', 'wizard/account_asset_change_values_view.xml', 'wizard/account_asset_depreciation_wizard.xml', 'wizard/account_asset_change_duration_view.xml', 'views/account_asset_view.xml', ], 'demo': [ 'demo/account_asset_demo.xml' ], 'css': [ 'static/src/css/account_asset_streamline.css' ], 'test': [], 'installable': True, 'active': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Change version to 1.2.1 (dev)
Change version to 1.2.1 (dev)
Python
agpl-3.0
xcgd/account_asset_streamline
--- +++ @@ -4,7 +4,7 @@ ############################################################################## { 'name': u"Asset Streamline", - 'version': u"1.0", + 'version': u"1.2.1", 'author': u"XCG Consulting", 'category': u"Custom Module", 'description': u"""Includes several integrity fixes and optimizations over
1d2d3f5dcab390244c58aa04305d74570ace5e44
pyramid_authsanity/__init__.py
pyramid_authsanity/__init__.py
from .interfaces import ( IAuthSourceService, IAuthService, ) from .policy import ( AuthServicePolicy, NoAuthCompleted, ) def includeme(config): config.set_authentication_policy(AuthServicePolicy())
from pyramid.settings import ( asbool, aslist, ) from .interfaces import ( IAuthSourceService, IAuthService, ) from .policy import ( AuthServicePolicy, NoAuthCompleted, ) from .sources import ( SessionAuthSourceInitializer, CookieAuthSourceInitializer, ) from .util import ( int_or_none, kw_from_settings, ) default_settings = ( ('source', str, ''), ('cookie.cookie_name', str, 'auth'), ('cookie.max_age', int_or_none, None), ('cookie.httponly', asbool, False), ('cookie.path', str, '/'), ('cookie.domains', aslist, []), ('cookie.debug', asbool, False), ('session.value_key', str, 'sanity.'), ) def init_cookie_source(config, settings): if 'authsanity.secret' not in settings: raise RuntimeError('authsanity.secret is required for cookie based storage') kw = kw_from_settings(settings, 'authsanity.cookie.') config.register_service_factory(CookieAuthSourceInitializer(settings['authsanity.secret'], **kw), iface=IAuthSourceService) def init_session_source(config, settings): kw = kw_from_settings(settings, 'authsanity.session.') config.register_service_factory(SessionAuthSourceInitializer(**kw), iface=IAuthSourceService) default_sources = { 'cookie': init_cookie_source, 'session': init_session_source, } # Stolen from pyramid_debugtoolbar def parse_settings(settings): parsed = {} def populate(name, convert, default): name = '%s%s' % ('authsanity.', name) value = convert(settings.get(name, default)) parsed[name] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): # Go parse the settings settings = parse_settings(config.registry.settings) # Update the config config.registry.settings.update(settings) # include pyramid_services config.include('pyramid_services') if settings['authsanity.source'] in default_sources: default_sources[settings['authsanity.source']](config, config.registry.settings) config.set_authentication_policy(AuthServicePolicy())
Add settings that can set various defaults
Add settings that can set various defaults This makes it very simple to use pyramid_authsanity with one of two sources, as well as allowing the user to tweak the settings for the source of their choosing.
Python
isc
usingnamespace/pyramid_authsanity
--- +++ @@ -1,3 +1,8 @@ +from pyramid.settings import ( + asbool, + aslist, + ) + from .interfaces import ( IAuthSourceService, IAuthService, @@ -8,5 +13,69 @@ NoAuthCompleted, ) +from .sources import ( + SessionAuthSourceInitializer, + CookieAuthSourceInitializer, + ) + +from .util import ( + int_or_none, + kw_from_settings, + ) + +default_settings = ( + ('source', str, ''), + ('cookie.cookie_name', str, 'auth'), + ('cookie.max_age', int_or_none, None), + ('cookie.httponly', asbool, False), + ('cookie.path', str, '/'), + ('cookie.domains', aslist, []), + ('cookie.debug', asbool, False), + ('session.value_key', str, 'sanity.'), +) + +def init_cookie_source(config, settings): + if 'authsanity.secret' not in settings: + raise RuntimeError('authsanity.secret is required for cookie based storage') + + kw = kw_from_settings(settings, 'authsanity.cookie.') + + config.register_service_factory(CookieAuthSourceInitializer(settings['authsanity.secret'], **kw), iface=IAuthSourceService) + +def init_session_source(config, settings): + kw = kw_from_settings(settings, 'authsanity.session.') + + config.register_service_factory(SessionAuthSourceInitializer(**kw), iface=IAuthSourceService) + +default_sources = { + 'cookie': init_cookie_source, + 'session': init_session_source, +} + +# Stolen from pyramid_debugtoolbar +def parse_settings(settings): + parsed = {} + + def populate(name, convert, default): + name = '%s%s' % ('authsanity.', name) + value = convert(settings.get(name, default)) + parsed[name] = value + for name, convert, default in default_settings: + populate(name, convert, default) + return parsed + + def includeme(config): + # Go parse the settings + settings = parse_settings(config.registry.settings) + + # Update the config + config.registry.settings.update(settings) + + # include pyramid_services + config.include('pyramid_services') + + if settings['authsanity.source'] in default_sources: + default_sources[settings['authsanity.source']](config, config.registry.settings) + config.set_authentication_policy(AuthServicePolicy())
4f2dabc45f22a9ad6350ab33267e4bdf4a00b4ea
tests/testapp/tests/modelview.py
tests/testapp/tests/modelview.py
from django.test import TestCase from towel import deletion from testapp.models import Person, EmailAddress class ModelViewTest(TestCase): def test_list_view(self): for i in range(7): p = Person.objects.create(family_name='Family %r' % i) # paginate_by=5 self.assertContains(self.client.get('/persons/'), 'name="batch_', 5) self.assertContains(self.client.get('/persons/?page=2'), 'name="batch_', 2) self.assertContains(self.client.get(p.get_absolute_url()), 'Family 6') self.assertEqual(self.client.get('/persons/0/').status_code, 404) self.assertEqual(self.client.get('/persons/a/').status_code, 404)
from django.test import TestCase from towel import deletion from testapp.models import Person, EmailAddress class ModelViewTest(TestCase): def test_list_view(self): for i in range(7): p = Person.objects.create(family_name='Family %r' % i) # paginate_by=5 self.assertContains(self.client.get('/persons/'), 'name="batch_', 5) self.assertContains(self.client.get('/persons/?page=2'), 'name="batch_', 2) self.assertContains(self.client.get(p.get_absolute_url()), 'Family 6') self.assertEqual(self.client.get('/persons/0/').status_code, 404) self.assertEqual(self.client.get('/persons/a/').status_code, 404) def test_crud(self): self.assertContains(self.client.get('/persons/add/'), '<form', 1) self.assertEqual(self.client.post('/persons/add/', {}).status_code, 200) response = self.client.post('/persons/add/', { 'family_name': 'Blub', 'given_name': 'Blab', 'created': '2013-01-01 01:00:00', }) person = Person.objects.get() self.assertRedirects(response, person.get_absolute_url()) self.assertContains(self.client.get(person.get_absolute_url()), 'Blab Blub')
Add tests for adding stuff through the ModelView
Add tests for adding stuff through the ModelView
Python
bsd-3-clause
matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel
--- +++ @@ -20,3 +20,17 @@ 'Family 6') self.assertEqual(self.client.get('/persons/0/').status_code, 404) self.assertEqual(self.client.get('/persons/a/').status_code, 404) + + def test_crud(self): + self.assertContains(self.client.get('/persons/add/'), '<form', 1) + self.assertEqual(self.client.post('/persons/add/', {}).status_code, + 200) + response = self.client.post('/persons/add/', { + 'family_name': 'Blub', + 'given_name': 'Blab', + 'created': '2013-01-01 01:00:00', + }) + person = Person.objects.get() + self.assertRedirects(response, person.get_absolute_url()) + self.assertContains(self.client.get(person.get_absolute_url()), + 'Blab Blub')
fa55f976f4a41bfcbf2009e543fd2a0057451c31
metakernel/magics/plot_magic.py
metakernel/magics/plot_magic.py
# Copyright (c) Metakernel Development Team. # Distributed under the terms of the Modified BSD License. from metakernel import Magic, option class PlotMagic(Magic): @option( '-s', '--size', action='store', help='Pixel size of plots, "width,height"' ) @option( '-f', '--format', action='store', default='png', help='Plot format (png, svg or jpg).' ) @option( '-b', '--backend', action='store', default='inline', help='Backend selection' ) def line_plot(self, backend=None, size=None, format=None): def line_plot(self, backend=None, size=None, format=None, resolution=96): """ %plot [options] backend - configure plotting for the session. This line magic will configure the plot settings for this language. Examples: %plot --backend=qt --format=png %plot -b inline -s 640,480 Note: not all languages may support the %plot magic. """ self.kernel.update_plot_settings(backend, size, format, resolution) self.kernel.handle_plot_settings() def register_magics(kernel): kernel.register_magics(PlotMagic)
# Copyright (c) Metakernel Development Team. # Distributed under the terms of the Modified BSD License. from metakernel import Magic, option class PlotMagic(Magic): @option( '-s', '--size', action='store', help='Pixel size of plots, "width,height"' ) @option( '-f', '--format', action='store', default='png', help='Plot format (png, svg or jpg).' ) @option( '-b', '--backend', action='store', default='inline', help='Backend selection' ) @option( '-r', '--resolution', action='store', default=96, help='Resolution in pixels per inch' ) def line_plot(self, backend=None, size=None, format=None, resolution=96): """ %plot [options] backend - configure plotting for the session. This line magic will configure the plot settings for this language. Examples: %plot --backend=qt --format=png %plot -b inline -s 640,480 Note: not all languages may support the %plot magic, and not all options may be supported. """ self.kernel.update_plot_settings(backend, size, format, resolution) self.kernel.handle_plot_settings() def register_magics(kernel): kernel.register_magics(PlotMagic)
Fix the option and update the note
Fix the option and update the note
Python
bsd-3-clause
Calysto/metakernel
--- +++ @@ -18,7 +18,10 @@ '-b', '--backend', action='store', default='inline', help='Backend selection' ) - def line_plot(self, backend=None, size=None, format=None): + @option( + '-r', '--resolution', action='store', default=96, + help='Resolution in pixels per inch' + ) def line_plot(self, backend=None, size=None, format=None, resolution=96): """ %plot [options] backend - configure plotting for the session. @@ -30,7 +33,8 @@ %plot --backend=qt --format=png %plot -b inline -s 640,480 - Note: not all languages may support the %plot magic. + Note: not all languages may support the %plot magic, and not all + options may be supported. """ self.kernel.update_plot_settings(backend, size, format, resolution) self.kernel.handle_plot_settings()
630a8683ba748f130bbb70c285d30142e50cd8ba
playlist_kreator/gmusic.py
playlist_kreator/gmusic.py
from gmusicapi import Mobileclient def create_playlist(playlist_name, artists, email, password, max_top_tracks=2): api = Mobileclient() logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS) if not logged_in: raise Exception('Could not connect') song_ids = [] for artist_name in artists: search = api.search(artist_name) if len(search["artist_hits"]) == 0: print('{}: Does not exist in Google Music. Skipping'.format(artist_name)) else: artist_id = search["artist_hits"][0]["artist"]["artistId"] artist = api.get_artist_info(artist_id, include_albums=False, max_top_tracks=max_top_tracks, max_rel_artist=0) if 'topTracks' not in artist: print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name)) else: song_ids = song_ids + [track['nid'] for track in artist['topTracks']] print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks']))) playlist_id = api.create_playlist(playlist_name) print('\nCreated playlist {} ({})'.format(playlist_name, playlist_id)) api.add_songs_to_playlist(playlist_id, song_ids) print('Added {} songs to the playlist'.format(len(song_ids))) print('All done, well done. Enjoy!')
from gmusicapi import Mobileclient def create_playlist(playlist_name, artists, email, password, max_top_tracks=2): api = Mobileclient() logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS) if not logged_in: raise Exception('Could not connect') song_ids = [] for artist_name in artists: search = api.search(artist_name) if len(search["artist_hits"]) == 0: print('{}: Does not exist in Google Music. Skipping'.format(artist_name)) else: artist_id = search["artist_hits"][0]["artist"]["artistId"] artist = api.get_artist_info(artist_id, include_albums=False, max_top_tracks=max_top_tracks, max_rel_artist=0) if 'topTracks' not in artist: print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name)) else: song_ids = song_ids + [track['nid'] for track in artist['topTracks']] print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks']))) playlist_id = api.create_playlist(playlist_name) print('\nCreated playlist "{}" ({})'.format(playlist_name, playlist_id)) api.add_songs_to_playlist(playlist_id, song_ids) print('Added {} songs to the playlist'.format(len(song_ids))) print('All done. Enjoy! 🤘')
Change logs for google music
Change logs for google music
Python
mit
epayet/playlist_kreator,epayet/playlist_kreator
--- +++ @@ -27,9 +27,8 @@ print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks']))) playlist_id = api.create_playlist(playlist_name) - print('\nCreated playlist {} ({})'.format(playlist_name, playlist_id)) - + print('\nCreated playlist "{}" ({})'.format(playlist_name, playlist_id)) + api.add_songs_to_playlist(playlist_id, song_ids) print('Added {} songs to the playlist'.format(len(song_ids))) - - print('All done, well done. Enjoy!') + print('All done. Enjoy! 🤘')
6a4c6c11cd0d2496ee33b7536b6691a34e1c81a8
salt/returners/redis_return.py
salt/returners/redis_return.py
''' Return data to a redis server This is a VERY simple example for pushing data to a redis server and is not nessisarily intended as a usable interface. ''' import redis import json __opts__ = { 'redis.host': 'mcp', 'redis.port': 6379, 'redis.db': '0', } def returner(ret): ''' Return data to a redis data store ''' serv = redis.Redis( host=__opts__['redis.host'], port=__opts__['redis.port'], db=__opts__['redis.db']) serv.sadd(ret['id'] + 'jobs', ret['jid']) serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return'])) serv.sadd('jobs', ret['jid']) serv.sadd(ret['jid'], ret['id'])
''' Return data to a redis server This is a VERY simple example for pushing data to a redis server and is not nessisarily intended as a usable interface. ''' import redis import json __opts__ = { 'redis.host': 'mcp', 'redis.port': 6379, 'redis.db': '0', } def returner(ret): ''' Return data to a redis data store ''' serv = redis.Redis( host=__opts__['redis.host'], port=__opts__['redis.port'], db=__opts__['redis.db']) serv.sadd(ret['id'] + ':' + 'jobs', ret['jid']) serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return'])) serv.sadd('jobs', ret['jid']) serv.sadd(ret['jid'], ret['id'])
Add : to the id:jobs set key
Add : to the id:jobs set key
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
--- +++ @@ -21,7 +21,7 @@ host=__opts__['redis.host'], port=__opts__['redis.port'], db=__opts__['redis.db']) - serv.sadd(ret['id'] + 'jobs', ret['jid']) + serv.sadd(ret['id'] + ':' + 'jobs', ret['jid']) serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return'])) serv.sadd('jobs', ret['jid']) serv.sadd(ret['jid'], ret['id'])
3bcd36a063b112edb657a739287c6a2db3141746
appolo/models.py
appolo/models.py
from django.db import models class Locatie(models.Model): def __unicode__(self): return self.naam naam = models.CharField(max_length=200) lat = models.FloatField() long = models.FloatField() class Dag(models.Model): def __unicode__(self): return unicode(self.datum) datum = models.DateField() class Activiteit(models.Model): def __unicode__(self): return self.naam naam = models.CharField(max_length=200) begintijd = models.DateTimeField() eindtijd = models.DateTimeField() dag = models.ForeignKey(Dag) locatie = models.ForeignKey(Locatie) class Nieuwsitem(models.Model): def __unicode__(self): return self.titel titel = models.CharField(max_length=200) tekst = models.TextField() class Hashtag(models.Model): def __unicode__(self): return self.tekst tekst = models.CharField(max_length=200)
from django.db import models class Locatie(models.Model): def __unicode__(self): return self.naam naam = models.CharField(max_length=200) lat = models.FloatField() long = models.FloatField() class Meta: verbose_name_plural = 'locaties' class Dag(models.Model): def __unicode__(self): return unicode(self.datum) datum = models.DateField() class Meta: verbose_name_plural = 'dagen' class Activiteit(models.Model): def __unicode__(self): return self.naam naam = models.CharField(max_length=200) begintijd = models.DateTimeField() eindtijd = models.DateTimeField() dag = models.ForeignKey(Dag) locatie = models.ForeignKey(Locatie) class Meta: verbose_name_plural = 'activiteiten' class Nieuwsitem(models.Model): def __unicode__(self): return self.titel titel = models.CharField(max_length=200) tekst = models.TextField() class Meta: verbose_name_plural = 'nieuwsitems' class Hashtag(models.Model): def __unicode__(self): return self.tekst tekst = models.CharField(max_length=200) class Meta: verbose_name_plural = 'hashtags'
Correct meervoud modellen van appolo
Correct meervoud modellen van appolo
Python
mit
jonge-democraten/zues,jonge-democraten/zues,jonge-democraten/zues
--- +++ @@ -7,10 +7,16 @@ lat = models.FloatField() long = models.FloatField() + class Meta: + verbose_name_plural = 'locaties' + class Dag(models.Model): def __unicode__(self): return unicode(self.datum) datum = models.DateField() + + class Meta: + verbose_name_plural = 'dagen' class Activiteit(models.Model): def __unicode__(self): @@ -21,13 +27,22 @@ dag = models.ForeignKey(Dag) locatie = models.ForeignKey(Locatie) + class Meta: + verbose_name_plural = 'activiteiten' + class Nieuwsitem(models.Model): def __unicode__(self): return self.titel titel = models.CharField(max_length=200) tekst = models.TextField() + class Meta: + verbose_name_plural = 'nieuwsitems' + class Hashtag(models.Model): def __unicode__(self): return self.tekst tekst = models.CharField(max_length=200) + + class Meta: + verbose_name_plural = 'hashtags'
fe4a72a49b8291f680c31037baa1c8b33e2ed227
tests/test_carddb.py
tests/test_carddb.py
from hearthstone.enums import CardType, GameTag, Rarity import utils CARDS = utils.fireplace.cards.db def test_all_tags_known(): """ Iterate through the card database and check that all specified GameTags are known in hearthstone.enums.GameTag """ unknown_tags = set() known_tags = list(GameTag) known_rarities = list(Rarity) # Check the db loaded correctly assert utils.fireplace.cards.db for card in CARDS.values(): for tag in card.tags: # We have fake tags in fireplace.enums which are always negative if tag not in known_tags and tag > 0: unknown_tags.add(tag) # Test rarities as well (cf. TB_BlingBrawl_Blade1e in 10956...) assert card.rarity in known_rarities assert not unknown_tags def test_play_scripts(): for card in CARDS.values(): if card.scripts.activate: assert card.type == CardType.HERO_POWER elif card.scripts.play: assert card.type not in (CardType.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT)
from hearthstone.enums import CardType, GameTag, Rarity import utils CARDS = utils.fireplace.cards.db def test_all_tags_known(): """ Iterate through the card database and check that all specified GameTags are known in hearthstone.enums.GameTag """ unknown_tags = set() known_tags = list(GameTag) known_rarities = list(Rarity) # Check the db loaded correctly assert utils.fireplace.cards.db for card in CARDS.values(): for tag in card.tags: # We have fake tags in fireplace.enums which are always negative if tag not in known_tags and tag > 0: unknown_tags.add(tag) # Test rarities as well (cf. TB_BlingBrawl_Blade1e in 10956...) assert card.rarity in known_rarities assert not unknown_tags def test_play_scripts(): for card in CARDS.values(): if card.scripts.activate: assert card.type == CardType.HERO_POWER elif card.scripts.play: assert card.type not in (CardType.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT) def test_card_docstrings(): for card in CARDS.values(): c = utils.fireplace.utils.get_script_definition(card.id) name = c.__doc__ if name is not None: if name.endswith(")"): continue assert name == card.name
Add a test for using card docstrings as names
Add a test for using card docstrings as names
Python
agpl-3.0
NightKev/fireplace,jleclanche/fireplace,beheh/fireplace
--- +++ @@ -36,3 +36,13 @@ assert card.type == CardType.HERO_POWER elif card.scripts.play: assert card.type not in (CardType.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT) + + +def test_card_docstrings(): + for card in CARDS.values(): + c = utils.fireplace.utils.get_script_definition(card.id) + name = c.__doc__ + if name is not None: + if name.endswith(")"): + continue + assert name == card.name
a8571b066634b7e7cbeb35844e1ee0b0d678112c
tests/test_itunes.py
tests/test_itunes.py
""" test_itunes.py Copyright © 2015 Alex Danoff. All Rights Reserved. 2015-08-02 This file tests the functionality provided by the itunes module. """ import unittest from datetime import datetime from itunes.itunes import parse_value, run_applescript from itunes.exceptions import AppleScriptError class ITunesTests(unittest.TestCase): """ Test cases for iTunes functionality. """ def test_parse_value(self): self.assertEquals(parse_value("10"), 10) self.assertEquals(parse_value("1.0"), 1.0) self.assertTrue(parse_value("true")) self.assertFalse(parse_value("false")) self.assertIsNone(parse_value("")) self.assertIsNone(parse_value('""')) self.assertIsNone(parse_value("missing value")) self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \ '5:02:22 PM"'), datetime.fromtimestamp(1268517742)) def test_run_applescript(self): self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \ " APPLESCRIPT")
""" test_itunes.py Copyright © 2015 Alex Danoff. All Rights Reserved. 2015-08-02 This file tests the functionality provided by the itunes module. """ import unittest from datetime import datetime from itunes.itunes import parse_value, run_applescript, play_track from itunes.exceptions import AppleScriptError, TrackError class ITunesTests(unittest.TestCase): """ Test cases for iTunes functionality. """ def test_parse_value(self): self.assertEquals(parse_value("10"), 10) self.assertEquals(parse_value("1.0"), 1.0) self.assertTrue(parse_value("true")) self.assertFalse(parse_value("false")) self.assertIsNone(parse_value("")) self.assertIsNone(parse_value('""')) self.assertIsNone(parse_value("missing value")) self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \ '5:02:22 PM"'), datetime.fromtimestamp(1268517742)) def test_run_applescript(self): self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \ " APPLESCRIPT") def test_play_track(self): self.assertRaises(TrackError, play_track, "~~~~---`-`-`")
Add test for `play_track` function
Add test for `play_track` function New tests make sure that `play_track` raises a TrackError when an invalid track is requested.
Python
mit
adanoff/iTunesTUI
--- +++ @@ -10,8 +10,8 @@ import unittest from datetime import datetime -from itunes.itunes import parse_value, run_applescript -from itunes.exceptions import AppleScriptError +from itunes.itunes import parse_value, run_applescript, play_track +from itunes.exceptions import AppleScriptError, TrackError class ITunesTests(unittest.TestCase): """ @@ -32,3 +32,6 @@ def test_run_applescript(self): self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \ " APPLESCRIPT") + + def test_play_track(self): + self.assertRaises(TrackError, play_track, "~~~~---`-`-`")
2ab1f7687e2cd3b2814ae4c68f48d76fcea4e42b
tests/test_parser.py
tests/test_parser.py
#!/usr/bin/env python # -*- coding: utf-8 -*- line = " 0:00 InitGame: \g_matchmode\1\g_gametype\7\g_allowvote\536871039\g_gear\KQ\mapname\ut4_dust2_v2\gamename\q3urt43\g_survivor\0\auth\0\g_modversion\4.3.4" def test_initgame(): tmp = line.split() assert tmp[1] == "InitGame:" def test_mod43(): ret_val = 40 if "g_modversion\4.3" in line: ret_val = 43 assert ret_val == 43 def test_mod42(): ret_val = 40 if "g_modversion\4.2" in line: ret_val = 42 assert ret_val == 40 def test_ffa_gametype(): ret_val = None if "g_gametype\0" in line: ret_val = "FFA" assert ret_val != "FFA" def test_ctf_gametype(): ret_val = "FFA" if "g_gametype\7" in line: ret_val = "CTF" assert ret_val == "CTF"
#!/usr/bin/env python # -*- coding: utf-8 -*- line = " 0:00 InitGame: \g_matchmode\1\g_gametype\7\g_allowvote\536871039\g_gear\KQ\mapname\ut4_dust2_v2\gamename\q3urt43\g_survivor\0\auth\0\g_modversion\4.3.4" def test_initgame(): tmp = line.split() assert tmp[1] == "InitGame:" def test_mod43(): ret_val = 40 if "g_modversion\4.3" in line: ret_val = 43 assert ret_val == 43 def test_mod42(): ret_val = 40 if "g_modversion\4.2" in line: ret_val = 42 assert ret_val == 40 def test_ffa_gametype(): ret_val = None if "g_gametype\0" in line: ret_val = "FFA" assert ret_val != "FFA" def test_ctf_gametype(): ret_val = "FFA" if "g_gametype\7" in line: ret_val = "CTF" assert ret_val == "CTF" def test_gear_value(): gear = line.split('g_gear\\')[-1].split('\\')[0] if 'g_gear\\' in line else "%s" % '' assert gear == "KQ"
Add test to check gear value
Add test to check gear value
Python
mit
SpunkyBot/spunkybot,SpunkyBot/spunkybot
--- +++ @@ -35,3 +35,8 @@ if "g_gametype\7" in line: ret_val = "CTF" assert ret_val == "CTF" + + +def test_gear_value(): + gear = line.split('g_gear\\')[-1].split('\\')[0] if 'g_gear\\' in line else "%s" % '' + assert gear == "KQ"
9837d14d4c9a1fa85d6dd122ebbdda6a6b559087
tests/test_travis.py
tests/test_travis.py
import unittest import permstruct import permstruct.dag class TestTravis(unittest.TestCase): def test_travis(self): perm_prop = lambda p: p.avoids([2,3,1]) perm_bound = 6 inp_dag = permstruct.dag.incr_decr(perm_prop, perm_bound) sol_iter = permstruct.construct_rule(perm_prop, perm_bound, inp_dag, (3, 3), 4, 100) for sol in sol_iter: print '====================================' print "" for rule in sol: print(rule) print ""
import unittest import permstruct import permstruct.dag import sys class TestTravis(unittest.TestCase): def test_travis(self): perm_prop = lambda p: p.avoids([2,3,1]) perm_bound = 6 inp_dag = permstruct.dag.incr_decr(perm_prop, perm_bound) sol_iter = permstruct.construct_rule(perm_prop, perm_bound, inp_dag, (3, 3), 4, 100) for sol in sol_iter: sys.stdout.write('====================================\n') sys.stdout.write('\n') for rule in sol: sys.stdout.write('%s\n\n' % rule)
Make tests working with python 2 and 3
Make tests working with python 2 and 3
Python
bsd-3-clause
PermutaTriangle/PermStruct
--- +++ @@ -1,6 +1,7 @@ import unittest import permstruct import permstruct.dag +import sys class TestTravis(unittest.TestCase): def test_travis(self): @@ -10,9 +11,8 @@ sol_iter = permstruct.construct_rule(perm_prop, perm_bound, inp_dag, (3, 3), 4, 100) for sol in sol_iter: - print '====================================' - print "" + sys.stdout.write('====================================\n') + sys.stdout.write('\n') for rule in sol: - print(rule) - print "" + sys.stdout.write('%s\n\n' % rule)
267076bba8b28f82da4d714d5ee3babc4d24b8da
voteswap/forms.py
voteswap/forms.py
from django import forms from polling.models import CANDIDATES from polling.models import CANDIDATES_THIRD_PARTY from polling.models import STATES class LandingPageForm(forms.Form): state = forms.ChoiceField(choices=STATES) preferred_candidate = forms.ChoiceField(choices=CANDIDATES) second_candidate = forms.ChoiceField(choices=CANDIDATES_THIRD_PARTY) reason = forms.Textarea()
from django import forms from polling.models import CANDIDATES from polling.models import CANDIDATES_THIRD_PARTY from polling.models import STATES class LandingPageForm(forms.Form): state = forms.ChoiceField(choices=STATES) preferred_candidate = forms.ChoiceField(choices=CANDIDATES) second_candidate = forms.ChoiceField( choices=CANDIDATES_THIRD_PARTY, required=False) reason = forms.Textarea()
Make second_candidate not required in sign up form
Make second_candidate not required in sign up form
Python
mit
sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap
--- +++ @@ -8,5 +8,7 @@ class LandingPageForm(forms.Form): state = forms.ChoiceField(choices=STATES) preferred_candidate = forms.ChoiceField(choices=CANDIDATES) - second_candidate = forms.ChoiceField(choices=CANDIDATES_THIRD_PARTY) + second_candidate = forms.ChoiceField( + choices=CANDIDATES_THIRD_PARTY, + required=False) reason = forms.Textarea()
373f0f4637103d526c75cae304740e621ad3c39c
resize.py
resize.py
# -*- coding: utf-8 -*- import cv2 import sys import numpy as np def resize(src, w_ratio, h_ratio): height = src.shape[0] width = src.shape[1] dst = cv2.resize(src,(width/100*w_ratio,height/100*h_ratio)) return dst if __name__ == '__main__': param = sys.argv if (len(param) != 4): print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio") quit() # open image file try: input_img = cv2.imread(param[1]) except: print ('faild to load %s' % param[1]) quit() if input_img is None: print ('faild to load %s' % param[1]) quit() w_ratio = int(param[2]) h_ratio = int(param[3]) output_img = resize(input_img, w_ratio, h_ratio) cv2.imwrite(param[1], output_img)
# -*- coding: utf-8 -*- import cv2 import sys def resize(src, w_ratio, h_ratio): height = src.shape[0] width = src.shape[1] dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio))) return dst if __name__ == '__main__': param = sys.argv if (len(param) != 4): print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio") quit() # open image file try: input_img = cv2.imread(param[1]) except: print ('faild to load %s' % param[1]) quit() if input_img is None: print ('faild to load %s' % param[1]) quit() w_ratio = int(param[2]) h_ratio = int(param[3]) output_img = resize(input_img, w_ratio, h_ratio) cv2.imwrite(param[1], output_img)
Fix bug and delete unused library
Fix bug and delete unused library
Python
mit
karaage0703/python-image-processing,karaage0703/python-image-processing
--- +++ @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- import cv2 import sys -import numpy as np def resize(src, w_ratio, h_ratio): height = src.shape[0] width = src.shape[1] - dst = cv2.resize(src,(width/100*w_ratio,height/100*h_ratio)) + dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio))) return dst if __name__ == '__main__':
1c231a8ef54af82d8ec03b828856ddac619fd345
knights/compat/django.py
knights/compat/django.py
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token))
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) @register.helper def safe(value): return str(value)
Add a dummy safe filter for Django compat
Add a dummy safe filter for Django compat
Python
mit
funkybob/knights-templater,funkybob/knights-templater
--- +++ @@ -18,3 +18,8 @@ @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) + + +@register.helper +def safe(value): + return str(value)
5d8555ffc9a4b0549d32161a79aada3857b9d639
webapp/graphite/events/models.py
webapp/graphite/events/models.py
import time import os from django.db import models from django.contrib import admin if os.environ.get('READTHEDOCS'): TagField = lambda *args, **kwargs: None else: from tagging.fields import TagField class Event(models.Model): class Admin: pass when = models.DateTimeField() what = models.CharField(max_length=255) data = models.TextField(blank=True) tags = TagField(default="") def get_tags(self): return Tag.objects.get_for_object(self) def __str__(self): return "%s: %s" % (self.when, self.what) @staticmethod def find_events(time_from=None, time_until=None, tags=None): query = Event.objects.all() if time_from is not None: query = query.filter(when__gte=time_from) if time_until is not None: query = query.filter(when__lte=time_until) if tags is not None: for tag in tags: query = query.filter(tags__iregex=r'\b%s\b' % tag) result = list(query.order_by("when")) return result def as_dict(self): return dict( when=self.when, what=self.what, data=self.data, tags=self.tags, id=self.id, )
import time import os from django.db import models from django.contrib import admin from tagging.managers import ModelTaggedItemManager if os.environ.get('READTHEDOCS'): TagField = lambda *args, **kwargs: None else: from tagging.fields import TagField class Event(models.Model): class Admin: pass when = models.DateTimeField() what = models.CharField(max_length=255) data = models.TextField(blank=True) tags = TagField(default="") def get_tags(self): return Tag.objects.get_for_object(self) def __str__(self): return "%s: %s" % (self.when, self.what) @staticmethod def find_events(time_from=None, time_until=None, tags=None): if tags is not None: query = Event.tagged.with_all(tags) else: query = Event.objects.all() if time_from is not None: query = query.filter(when__gte=time_from) if time_until is not None: query = query.filter(when__lte=time_until) result = list(query.order_by("when")) return result def as_dict(self): return dict( when=self.when, what=self.what, data=self.data, tags=self.tags, id=self.id, ) # We use this rather than tagging.register() so that tags can be exposed # in the admin UI ModelTaggedItemManager().contribute_to_class(Event, 'tagged')
Fix events to work on mysql
Fix events to work on mysql Closes https://bugs.launchpad.net/graphite/+bug/993625
Python
apache-2.0
jssjr/graphite-web,mleinart/graphite-web,DanCech/graphite-web,bruce-lyft/graphite-web,dhtech/graphite-web,cgvarela/graphite-web,ceph/graphite-web,AICIDNN/graphite-web,kkdk5535/graphite-web,axibase/graphite-web,graphite-project/graphite-web,g76r/graphite-web,redice/graphite-web,section-io/graphite-web,cloudant/graphite-web,mcoolive/graphite-web,JeanFred/graphite-web,disqus/graphite-web,dbn/graphite-web,ZelunZhang/graphite-web,johnseekins/graphite-web,blacked/graphite-web,disqus/graphite-web,goir/graphite-web,deniszh/graphite-web,gwaldo/graphite-web,esnet/graphite-web,Krylon360/evernote-graphite-web,cbowman0/graphite-web,redice/graphite-web,nkhuyu/graphite-web,synedge/graphite-web,slackhappy/graphite-web,pu239ppy/graphite-web,Invoca/graphite-web,markolson/graphite-web,cosm0s/graphite-web,cbowman0/graphite-web,piotr1212/graphite-web,section-io/graphite-web,bruce-lyft/graphite-web,Skyscanner/graphite-web,cosm0s/graphite-web,EinsamHauer/graphite-web-iow,deniszh/graphite-web,bpaquet/graphite-web,deniszh/graphite-web,bmhatfield/graphite-web,0x20h/graphite-web,ZelunZhang/graphite-web,SEJeff/graphite-web,zuazo-forks/graphite-web,axibase/graphite-web,redice/graphite-web,afilipovich/graphite-web,DanCech/graphite-web,cgvarela/graphite-web,MjAbuz/graphite-web,cosm0s/graphite-web,criteo-forks/graphite-web,nkhuyu/graphite-web,Krylon360/vimeo-graphite-web,penpen/graphite-web,ceph/graphite-web,afilipovich/graphite-web,Aloomaio/graphite-web,goir/graphite-web,kkdk5535/graphite-web,Invoca/graphite-web,penpen/graphite-web,Krylon360/evernote-graphite-web,bmhatfield/graphite-web,cgvarela/graphite-web,axibase/graphite-web,JeanFred/graphite-web,cgvarela/graphite-web,EinsamHauer/graphite-web-iow,Squarespace/graphite-web,axibase/graphite-web,g76r/graphite-web,Invoca/graphite-web,afilipovich/graphite-web,edwardmlyte/graphite-web,penpen/graphite-web,brutasse/graphite-web,mleinart/graphite-web,kkdk5535/graphite-web,nkhuyu/graphite-web,evernote/graphite-web,bpaquet/graphite-web,zuazo-forks/graphite-web,blacked/graphite-web,bbc/graphite-web,graphite-server/graphite-web,piotr1212/graphite-web,atnak/graphite-web,0x20h/graphite-web,AICIDNN/graphite-web,mcoolive/graphite-web,brutasse/graphite-web,dhtech/graphite-web,evernote/graphite-web,ceph/graphite-web,Krylon360/vimeo-graphite-web,DanCech/graphite-web,deniszh/graphite-web,penpen/graphite-web,cbowman0/graphite-web,graphite-server/graphite-web,krux/graphite-web,synedge/graphite-web,krux/graphite-web,johnseekins/graphite-web,afilipovich/graphite-web,phreakocious/graphite-web,EinsamHauer/graphite-web-iow,goir/graphite-web,bmhatfield/graphite-web,kkdk5535/graphite-web,drax68/graphite-web,SEJeff/graphite-web,johnseekins/graphite-web,JeanFred/graphite-web,atnak/graphite-web,Krylon360/vimeo-graphite-web,SEJeff/graphite-web,synedge/graphite-web,pu239ppy/graphite-web,obfuscurity/graphite-web,JeanFred/graphite-web,Squarespace/graphite-web,graphite-project/graphite-web,evernote/graphite-web,redice/graphite-web,blacked/graphite-web,brutasse/graphite-web,mleinart/graphite-web,markolson/graphite-web,cybem/graphite-web-iow,Squarespace/graphite-web,brutasse/graphite-web,afilipovich/graphite-web,cybem/graphite-web-iow,mcoolive/graphite-web,drax68/graphite-web,bmhatfield/graphite-web,bbc/graphite-web,Krylon360/evernote-graphite-web,zBMNForks/graphite-web,cosm0s/graphite-web,edwardmlyte/graphite-web,dbn/graphite-web,slackhappy/graphite-web,phreakocious/graphite-web,graphite-project/graphite-web,criteo-forks/graphite-web,Skyscanner/graphite-web,Squarespace/graphite-web,gwaldo/graphite-web,edwa
rdmlyte/graphite-web,krux/graphite-web,Krylon360/evernote-graphite-web,mcoolive/graphite-web,Squarespace/graphite-web,zuazo-forks/graphite-web,Invoca/graphite-web,0x20h/graphite-web,atnak/graphite-web,atnak/graphite-web,criteo-forks/graphite-web,Skyscanner/graphite-web,pcn/graphite-web,SEJeff/graphite-web,cosm0s/graphite-web,drax68/graphite-web,atnak/graphite-web,DanCech/graphite-web,Krylon360/vimeo-graphite-web,johnseekins/graphite-web,pcn/graphite-web,esnet/graphite-web,bmhatfield/graphite-web,zBMNForks/graphite-web,markolson/graphite-web,axibase/graphite-web,piotr1212/graphite-web,cbowman0/graphite-web,bbc/graphite-web,markolson/graphite-web,dbn/graphite-web,deniszh/graphite-web,blacked/graphite-web,SEJeff/graphite-web,bruce-lyft/graphite-web,AICIDNN/graphite-web,Aloomaio/graphite-web,bpaquet/graphite-web,dhtech/graphite-web,atnak/graphite-web,section-io/graphite-web,bruce-lyft/graphite-web,cloudant/graphite-web,pcn/graphite-web,DanCech/graphite-web,graphite-project/graphite-web,Skyscanner/graphite-web,JeanFred/graphite-web,disqus/graphite-web,section-io/graphite-web,krux/graphite-web,pu239ppy/graphite-web,jssjr/graphite-web,zBMNForks/graphite-web,lyft/graphite-web,bruce-lyft/graphite-web,Squarespace/graphite-web,graphite-server/graphite-web,pu239ppy/graphite-web,g76r/graphite-web,disqus/graphite-web,lyft/graphite-web,goir/graphite-web,Krylon360/vimeo-graphite-web,nkhuyu/graphite-web,dbn/graphite-web,ZelunZhang/graphite-web,obfuscurity/graphite-web,deniszh/graphite-web,johnseekins/graphite-web,zuazo-forks/graphite-web,graphite-server/graphite-web,mcoolive/graphite-web,Krylon360/evernote-graphite-web,disqus/graphite-web,bruce-lyft/graphite-web,lyft/graphite-web,graphite-server/graphite-web,esnet/graphite-web,johnseekins/graphite-web,zBMNForks/graphite-web,edwardmlyte/graphite-web,cybem/graphite-web-iow,brutasse/graphite-web,lfckop/graphite-web,blacked/graphite-web,Aloomaio/graphite-web,lfckop/graphite-web,bmhatfield/graphite-web,jssjr/graphite-web,Krylon360/evernote-graphite-web,lyft/graphite-web,Talkdesk/graphite-web,g76r/graphite-web,0x20h/graphite-web,slackhappy/graphite-web,cybem/graphite-web-iow,Talkdesk/graphite-web,obfuscurity/graphite-web,cloudant/graphite-web,pcn/graphite-web,mleinart/graphite-web,Aloomaio/graphite-web,evernote/graphite-web,piotr1212/graphite-web,Talkdesk/graphite-web,Talkdesk/graphite-web,obfuscurity/graphite-web,drax68/graphite-web,esnet/graphite-web,dhtech/graphite-web,cloudant/graphite-web,dbn/graphite-web,MjAbuz/graphite-web,jssjr/graphite-web,ZelunZhang/graphite-web,bpaquet/graphite-web,cloudant/graphite-web,g76r/graphite-web,zBMNForks/graphite-web,redice/graphite-web,markolson/graphite-web,mleinart/graphite-web,section-io/graphite-web,cybem/graphite-web-iow,synedge/graphite-web,Aloomaio/graphite-web,axibase/graphite-web,lfckop/graphite-web,kkdk5535/graphite-web,Krylon360/vimeo-graphite-web,gwaldo/graphite-web,penpen/graphite-web,edwardmlyte/graphite-web,section-io/graphite-web,nkhuyu/graphite-web,evernote/graphite-web,lfckop/graphite-web,nkhuyu/graphite-web,dbn/graphite-web,jssjr/graphite-web,lyft/graphite-web,synedge/graphite-web,MjAbuz/graphite-web,Skyscanner/graphite-web,brutasse/graphite-web,Skyscanner/graphite-web,obfuscurity/graphite-web,EinsamHauer/graphite-web-iow,lfckop/graphite-web,zuazo-forks/graphite-web,krux/graphite-web,bbc/graphite-web,0x20h/graphite-web,slackhappy/graphite-web,ZelunZhang/graphite-web,lyft/graphite-web,pu239ppy/graphite-web,Talkdesk/graphite-web,ZelunZhang/graphite-web,phreakocious/graphite-web,zBMNForks/graphite-web,Invoca/
graphite-web,EinsamHauer/graphite-web-iow,drax68/graphite-web,criteo-forks/graphite-web,dhtech/graphite-web,graphite-project/graphite-web,cbowman0/graphite-web,gwaldo/graphite-web,kkdk5535/graphite-web,pu239ppy/graphite-web,criteo-forks/graphite-web,gwaldo/graphite-web,edwardmlyte/graphite-web,drax68/graphite-web,goir/graphite-web,graphite-server/graphite-web,jssjr/graphite-web,Aloomaio/graphite-web,esnet/graphite-web,ceph/graphite-web,criteo-forks/graphite-web,MjAbuz/graphite-web,cgvarela/graphite-web,phreakocious/graphite-web,krux/graphite-web,mcoolive/graphite-web,JeanFred/graphite-web,blacked/graphite-web,slackhappy/graphite-web,piotr1212/graphite-web,MjAbuz/graphite-web,AICIDNN/graphite-web,EinsamHauer/graphite-web-iow,AICIDNN/graphite-web,graphite-project/graphite-web,gwaldo/graphite-web,Talkdesk/graphite-web,piotr1212/graphite-web,bpaquet/graphite-web,cosm0s/graphite-web,MjAbuz/graphite-web,penpen/graphite-web,bpaquet/graphite-web,phreakocious/graphite-web,obfuscurity/graphite-web,AICIDNN/graphite-web,ceph/graphite-web,cbowman0/graphite-web,disqus/graphite-web,g76r/graphite-web,DanCech/graphite-web,bbc/graphite-web,phreakocious/graphite-web,redice/graphite-web,synedge/graphite-web,cgvarela/graphite-web,goir/graphite-web,Invoca/graphite-web,lfckop/graphite-web,pcn/graphite-web,cybem/graphite-web-iow
--- +++ @@ -3,6 +3,7 @@ from django.db import models from django.contrib import admin +from tagging.managers import ModelTaggedItemManager if os.environ.get('READTHEDOCS'): TagField = lambda *args, **kwargs: None @@ -25,7 +26,11 @@ @staticmethod def find_events(time_from=None, time_until=None, tags=None): - query = Event.objects.all() + + if tags is not None: + query = Event.tagged.with_all(tags) + else: + query = Event.objects.all() if time_from is not None: query = query.filter(when__gte=time_from) @@ -33,9 +38,6 @@ if time_until is not None: query = query.filter(when__lte=time_until) - if tags is not None: - for tag in tags: - query = query.filter(tags__iregex=r'\b%s\b' % tag) result = list(query.order_by("when")) return result @@ -48,3 +50,7 @@ tags=self.tags, id=self.id, ) + +# We use this rather than tagging.register() so that tags can be exposed +# in the admin UI +ModelTaggedItemManager().contribute_to_class(Event, 'tagged')
32116cf93b30fc63394379b49e921f9e0ab2f652
django_filepicker/widgets.py
django_filepicker/widgets.py
from django.conf import settings from django.forms import widgets #JS_URL is the url to the filepicker.io javascript library JS_VERSION = 0 JS_URL = "//api.filepicker.io/v%d/filepicker.js" % (JS_VERSION) if hasattr(settings, 'FILEPICKER_INPUT_TYPE'): INPUT_TYPE = settings.FILEPICKER_INPUT_TYPE else: INPUT_TYPE = 'filepicker-dragdrop' class FPFileWidget(widgets.Input): input_type = INPUT_TYPE needs_multipart_form = False def value_from_datadict_old(self, data, files, name): #If we are using the middleware, then the data will already be #in FILES, if not it will be in POST if name not in data: return super(FPFileWidget, self).value_from_datadict( data, files, name) return data class Media: js = (JS_URL,)
from django.conf import settings from django.forms import widgets #JS_URL is the url to the filepicker.io javascript library JS_VERSION = 1 JS_URL = "//api.filepicker.io/v%d/filepicker.js" % (JS_VERSION) if hasattr(settings, 'FILEPICKER_INPUT_TYPE'): INPUT_TYPE = settings.FILEPICKER_INPUT_TYPE else: INPUT_TYPE = 'filepicker-dragdrop' class FPFileWidget(widgets.Input): input_type = INPUT_TYPE needs_multipart_form = False def value_from_datadict_old(self, data, files, name): #If we are using the middleware, then the data will already be #in FILES, if not it will be in POST if name not in data: return super(FPFileWidget, self).value_from_datadict( data, files, name) return data class Media: js = (JS_URL,)
Use version 1 of Filepicker.js
Use version 1 of Filepicker.js
Python
mit
filepicker/filepicker-django,filepicker/filepicker-django,FundedByMe/filepicker-django,FundedByMe/filepicker-django
--- +++ @@ -2,7 +2,7 @@ from django.forms import widgets #JS_URL is the url to the filepicker.io javascript library -JS_VERSION = 0 +JS_VERSION = 1 JS_URL = "//api.filepicker.io/v%d/filepicker.js" % (JS_VERSION) if hasattr(settings, 'FILEPICKER_INPUT_TYPE'):
767cf250a23c164cfcf7d6eba5a48116e3b111e5
app/admin/forms.py
app/admin/forms.py
from flask_pagedown.fields import PageDownField from wtforms.fields import StringField from wtforms.validators import DataRequired from app.models import Post, Tag from app.utils.helpers import get_or_create from app.utils.forms import RedirectForm from app.utils.fields import TagListField class PostForm(RedirectForm): title = StringField('Title', [DataRequired()]) short_text = PageDownField('Short text (displayed as preview)') long_text = PageDownField('Long text') tags = TagListField('Tags (separated by comma)') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.obj = kwargs.get('obj') def save(self): if not self.obj: self.obj = Post() self.populate_obj(self.obj) self.obj.tags = [get_or_create(Tag, name=tag)[0] for tag in self.tags.data] self.obj.save() def populate_obj(self, obj): for name, field in self._fields.items(): if name not in ('next', 'tags'): field.populate_obj(obj, name)
from flask_pagedown.fields import PageDownField from wtforms.fields import StringField from wtforms.validators import DataRequired from app.models import Post, Tag from app.utils.helpers import get_or_create from app.utils.forms import RedirectForm from app.utils.fields import TagListField class PostForm(RedirectForm): title = StringField('Title', [DataRequired()]) short_text = PageDownField('Short text (displayed as preview)') long_text = PageDownField('Long text') tags = TagListField('Tags (separated by comma)') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.obj = kwargs.get('obj') def save(self): """ Saves the Post object. Returns: The Post object """ if not self.obj: self.obj = Post() self.populate_obj(self.obj) self.obj.tags = [get_or_create(Tag, name=tag)[0] for tag in self.tags.data] return self.obj.save() def populate_obj(self, obj): for name, field in self._fields.items(): if name not in ('next', 'tags'): field.populate_obj(obj, name)
Return Post object in PostForm save method
Return Post object in PostForm save method
Python
mit
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
--- +++ @@ -19,12 +19,20 @@ self.obj = kwargs.get('obj') def save(self): + """ + Saves the Post object. + + Returns: + The Post object + + """ + if not self.obj: self.obj = Post() self.populate_obj(self.obj) self.obj.tags = [get_or_create(Tag, name=tag)[0] for tag in self.tags.data] - self.obj.save() + return self.obj.save() def populate_obj(self, obj): for name, field in self._fields.items():
1ba620c9e9459b935bb59ed55dbb3fc6c14b1994
edi/units/__init__.py
edi/units/__init__.py
# Copyright 2017 John Reese # Licensed under the MIT license import os.path import logging from pathlib import Path from importlib import import_module from types import ModuleType from typing import List log = logging.getLogger(__name__) def import_units() -> List[ModuleType]: """Find and import units in this path.""" modules: List[ModuleType] = [] root = Path(Path(__file__).parent) # appease mypy, Path.parents -> PurePath log.debug(f"Searching for units in {root}...") for path in root.glob("*.py"): name = path.stem log.debug(f"Loading unit {name}") module = import_module(f"edi.units.{name}") modules.append(module) return modules
# Copyright 2017 John Reese # Licensed under the MIT license import os.path import logging from pathlib import Path from importlib import import_module from types import ModuleType from typing import List log = logging.getLogger(__name__) def import_units(root: Path = None) -> List[ModuleType]: """Find and import units in this path.""" modules: List[ModuleType] = [] if root is None: root = Path(__file__) if not root.is_dir(): root = Path(root.parent) # appease mypy, Path.parents -> PurePath log.debug(f"Searching for units in {root}...") for path in root.glob("*.py"): name = path.stem if name.startswith("_"): continue log.debug(f"Loading unit {name}") module = import_module(f"edi.units.{name}") modules.append(module) return modules
Generalize import_units to enable custom directories
Generalize import_units to enable custom directories
Python
mit
jreese/edi
--- +++ @@ -12,13 +12,20 @@ log = logging.getLogger(__name__) -def import_units() -> List[ModuleType]: +def import_units(root: Path = None) -> List[ModuleType]: """Find and import units in this path.""" modules: List[ModuleType] = [] - root = Path(Path(__file__).parent) # appease mypy, Path.parents -> PurePath + + if root is None: + root = Path(__file__) + if not root.is_dir(): + root = Path(root.parent) # appease mypy, Path.parents -> PurePath + log.debug(f"Searching for units in {root}...") for path in root.glob("*.py"): name = path.stem + if name.startswith("_"): + continue log.debug(f"Loading unit {name}") module = import_module(f"edi.units.{name}") modules.append(module)
6d9ad75ca3ac9a5ed9aac33e56a4809fc7e37f54
gignore/__init__.py
gignore/__init__.py
__version__ = (2014, 10, 0) def get_version(): """ :rtype: str """ return '.'.join(str(i) for i in __version__) class Gignore(object): BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/' name = None file_content = None valid = True def get_base_url(self): """ :rtype: str """ return self.BASE_URL def set_name(self, name): """ :type name: str """ self.name = name def get_name(self): """ :rtype: str """ return self.name def set_file_content(self, file_content): """ :type file_content: str """ self.file_content = file_content def get_file_content(self): """ :rtype: str """ return self.file_content def is_valid(self): """ :rtype: bool """ return self.valid def set_valid(self, valid): """ :type valid: bool """ self.valid = valid
__version__ = (2014, 10, 0) def get_version(): """ :rtype: str """ return '.'.join(str(i) for i in __version__) class Gignore(object): BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/' name = None file_content = None valid = True errors = [] def get_base_url(self): """ :rtype: str """ return self.BASE_URL def set_name(self, name): """ :type name: str """ self.name = name def get_name(self): """ :rtype: str """ return self.name def set_file_content(self, file_content): """ :type file_content: str """ self.file_content = file_content def get_file_content(self): """ :rtype: str """ return self.file_content def is_valid(self): """ :rtype: bool """ return self.valid def set_valid(self, valid): """ :type valid: bool """ self.valid = valid def add_error(self, error_message): """ :type error_message: str """ self.errors.append(error_message) def get_errors(self): """ :rtype: list of str """ return self.errors
Add errors attribute with setter/getter
Add errors attribute with setter/getter
Python
bsd-3-clause
Alir3z4/python-gignore
--- +++ @@ -13,6 +13,7 @@ name = None file_content = None valid = True + errors = [] def get_base_url(self): """ @@ -55,3 +56,15 @@ :type valid: bool """ self.valid = valid + + def add_error(self, error_message): + """ + :type error_message: str + """ + self.errors.append(error_message) + + def get_errors(self): + """ + :rtype: list of str + """ + return self.errors
7141cedd5667b373e3dd5a723052de42ea5dfa10
ome/terminal.py
ome/terminal.py
import sys import os ansi_colour_list = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'] ansi_colour_code = dict((name, str(code)) for code, name in enumerate(ansi_colour_list, 30)) def is_ansi_terminal(file): return ((sys.platform != 'win32' or 'ANSICON' in os.environ) and hasattr(file, 'isatty') and file.isatty()) class MaybeAnsiTerminal(object): def __init__(self, file): self._file = file self.is_ansi = is_ansi_terminal(file) def __getattr__(self, attr): return getattr(self._file, attr) def write_ansi_code(self, code): if self.is_ansi: self._file.write('\x1B[' + code) def reset(self): self.write_ansi_code('0m') def bold(self): self.write_ansi_code('1m') def colour(self, name): self.write_ansi_code(ansi_colour_code[name] + 'm') stdout = MaybeAnsiTerminal(sys.stdout) stderr = MaybeAnsiTerminal(sys.stderr)
import sys import os ansi_colour_list = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'] ansi_colour_code = dict((name, '{}m'.format(code)) for code, name in enumerate(ansi_colour_list, 30)) def is_ansi_terminal(file): return ((sys.platform != 'win32' or 'ANSICON' in os.environ) and hasattr(file, 'isatty') and file.isatty()) class MaybeAnsiTerminal(object): def __init__(self, file): self._file = file self.is_ansi = is_ansi_terminal(file) def __getattr__(self, attr): return getattr(self._file, attr) def write_ansi_code(self, code): if self.is_ansi: self._file.write('\x1B[' + code) def reset(self): self.write_ansi_code('0m') def bold(self): self.write_ansi_code('1m') def colour(self, name): self.write_ansi_code(ansi_colour_code[name]) stdout = MaybeAnsiTerminal(sys.stdout) stderr = MaybeAnsiTerminal(sys.stderr)
Include m in colour code string instead of appending it later.
Include m in colour code string instead of appending it later.
Python
mit
shaurz/ome,shaurz/ome
--- +++ @@ -2,7 +2,7 @@ import os ansi_colour_list = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'] -ansi_colour_code = dict((name, str(code)) for code, name in enumerate(ansi_colour_list, 30)) +ansi_colour_code = dict((name, '{}m'.format(code)) for code, name in enumerate(ansi_colour_list, 30)) def is_ansi_terminal(file): return ((sys.platform != 'win32' or 'ANSICON' in os.environ) @@ -27,7 +27,7 @@ self.write_ansi_code('1m') def colour(self, name): - self.write_ansi_code(ansi_colour_code[name] + 'm') + self.write_ansi_code(ansi_colour_code[name]) stdout = MaybeAnsiTerminal(sys.stdout) stderr = MaybeAnsiTerminal(sys.stderr)
01b1b649539beb41073e7df427d6f4622d687a5d
tests/django_settings.py
tests/django_settings.py
# Minimum settings that are needed to run django test suite import os import secrets import tempfile SECRET_KEY = secrets.token_hex() if "postgresql" in os.getenv("TOX_ENV_NAME", "") or os.getenv("TEST_DATABASE") == "postgres": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'dirtyfields_test', 'USER': os.getenv('POSTGRES_USER', 'postgres'), 'PASSWORD': os.getenv('POSTGRES_PASSWORD', 'postgres'), 'HOST': 'localhost', 'PORT': '5432', # default postgresql port } } else: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'dirtyfields.db', } } INSTALLED_APPS = ('tests', ) MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
# Minimum settings that are needed to run django test suite import os import secrets import tempfile USE_TZ = True SECRET_KEY = secrets.token_hex() if "postgresql" in os.getenv("TOX_ENV_NAME", "") or os.getenv("TEST_DATABASE") == "postgres": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'dirtyfields_test', 'USER': os.getenv('POSTGRES_USER', 'postgres'), 'PASSWORD': os.getenv('POSTGRES_PASSWORD', 'postgres'), 'HOST': 'localhost', 'PORT': '5432', # default postgresql port } } else: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'dirtyfields.db', } } INSTALLED_APPS = ('tests', ) MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
Set USE_TZ=True; Django 4 raises a warning if this is not set to a value
Set USE_TZ=True; Django 4 raises a warning if this is not set to a value
Python
bsd-3-clause
romgar/django-dirtyfields
--- +++ @@ -3,6 +3,7 @@ import secrets import tempfile +USE_TZ = True SECRET_KEY = secrets.token_hex() if "postgresql" in os.getenv("TOX_ENV_NAME", "") or os.getenv("TEST_DATABASE") == "postgres":
54dc5c3a6ddf7fdc630547836058d017c778008f
python/recursive-digit-sum.py
python/recursive-digit-sum.py
#!/bin/python3 def superDigit(n, k): p = create_p(n, k) return get_super_digit(p) def get_super_digit(p): if len(p) == 1: return int(p) else: digits = map(int, list(p)) return get_super_digit(str(sum(digits))) def create_p(n, k): return n * k if __name__ == '__main__': nk = input().split() n = nk[0] k = int(nk[1]) result = superDigit(n, k) print(result)
#!/bin/python3 def super_digit(n, k): digits = map(int, list(n)) return get_super_digit(str(sum(digits) * k)) def get_super_digit(p): if len(p) == 1: return int(p) else: digits = map(int, list(p)) return get_super_digit(str(sum(digits))) if __name__ == '__main__': nk = input().split() n = nk[0] k = int(nk[1]) result = super_digit(n, k) print(result)
Implement shortcut to compute initial p super digit
Implement shortcut to compute initial p super digit
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
--- +++ @@ -1,8 +1,8 @@ #!/bin/python3 -def superDigit(n, k): - p = create_p(n, k) - return get_super_digit(p) +def super_digit(n, k): + digits = map(int, list(n)) + return get_super_digit(str(sum(digits) * k)) def get_super_digit(p): if len(p) == 1: @@ -11,12 +11,9 @@ digits = map(int, list(p)) return get_super_digit(str(sum(digits))) -def create_p(n, k): - return n * k - if __name__ == '__main__': nk = input().split() n = nk[0] k = int(nk[1]) - result = superDigit(n, k) + result = super_digit(n, k) print(result)
5d09fef9ee1f6b8627e372695a93be3236820f46
app/main/errors.py
app/main/errors.py
from flask import jsonify from . import main from ..models import ValidationError @main.app_errorhandler(ValidationError) def validatation_error(e): return jsonify(error=e.message), 400 def generic_error_handler(e): # TODO: log the error headers = [] error = e.description if e.code == 401: headers = [('WWW-Authenticate', 'Bearer')] elif e.code == 500: error = "Internal error" return jsonify(error=error), e.code, headers for code in range(400, 599): main.app_errorhandler(code)(generic_error_handler)
from flask import jsonify from . import main from ..models import ValidationError @main.app_errorhandler(ValidationError) def validatation_error(e): return jsonify(error=e.message), 400 def generic_error_handler(e): headers = [] code = getattr(e, 'code', 500) error = getattr(e, 'description', 'Internal error') if code == 401: headers = [('WWW-Authenticate', 'Bearer')] elif code == 500: error = "Internal error" return jsonify(error=error), code, headers for code in range(400, 599): main.app_errorhandler(code)(generic_error_handler)
Fix app error handler raising an attribute error
Fix app error handler raising an attribute error We're using a single error handler to return a JSON response for any error code. The handler expects a flask HTTP error exception with `.code` and `.description` attributes (like the ones raised by `abort`). However, if the app raises an exception that's not handled by the application code the error handler is called with the original exception object instead. Depending on the exception, that object might not contain code or description attributes. In this case, an AttributeError in the error handler itself would kill the WSGI worker and the application would fail to respond to the request (leading to a 502 from the nginx proxy). Replacing attribute access with `getattr` allows us to set the default values to a 500 response with 'Internal error' for all non-HTTP exceptions. We still get the error details in the logs, but we don't want to display any additional information in the HTTP response. Note: error handlers for HTTP 500 code are not triggered by Flask in DEBUG mode, so this code usually doesn't run locally.
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
--- +++ @@ -10,15 +10,16 @@ def generic_error_handler(e): - # TODO: log the error headers = [] - error = e.description - if e.code == 401: + code = getattr(e, 'code', 500) + error = getattr(e, 'description', 'Internal error') + + if code == 401: headers = [('WWW-Authenticate', 'Bearer')] - elif e.code == 500: + elif code == 500: error = "Internal error" - return jsonify(error=error), e.code, headers + return jsonify(error=error), code, headers for code in range(400, 599):
c313d6fb6803edabb956e1e90f040f8518c334bf
app/main/errors.py
app/main/errors.py
from flask import render_template from . import main @main.app_errorhandler(404) def page_not_found(e): return render_template("404.html"), 404
from flask import render_template from . import main @main.app_errorhandler(404) def page_not_found(e): return render_template("404.html", **main.config['BASE_TEMPLATE_DATA']), 404
Fix 404 page template static resources
Fix 404 page template static resources
Python
mit
mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
--- +++ @@ -4,4 +4,5 @@ @main.app_errorhandler(404) def page_not_found(e): - return render_template("404.html"), 404 + return render_template("404.html", + **main.config['BASE_TEMPLATE_DATA']), 404
3fe6d183b3c168da73f9fb65a9b52ffe1d79e6e1
txkazoo/test/test_version.py
txkazoo/test/test_version.py
import txkazoo from twisted.trial.unittest import SynchronousTestCase class VersionTests(SynchronousTestCase): """ Tests for programmatically acquiring the version of txkazoo. """ def test_both_names(self): """ The version is programmatically avaialble on the ``txkazoo`` module as ``__version__`` and ``version``. They are the same object. """ self.assertIdentical(txkazoo.__version__, txkazoo.version)
# Copyright 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import txkazoo from twisted.trial.unittest import SynchronousTestCase class VersionTests(SynchronousTestCase): """ Tests for programmatically acquiring the version of txkazoo. """ def test_both_names(self): """ The version is programmatically avaialble on the ``txkazoo`` module as ``__version__`` and ``version``. They are the same object. """ self.assertIdentical(txkazoo.__version__, txkazoo.version)
Add copyright header to test
Add copyright header to test
Python
apache-2.0
rackerlabs/txkazoo
--- +++ @@ -1,3 +1,17 @@ +# Copyright 2013-2014 Rackspace, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import txkazoo from twisted.trial.unittest import SynchronousTestCase
5d93d1fb887d76d6fbe0a2f699e973ed9f6e7556
tests/test_navigation.py
tests/test_navigation.py
def get_menu_titles(page) -> list: page.wait_for_load_state() menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a") return [title.as_element().inner_text() for title in menu_list] flag = True def test_check_titles(page): global flag if(flag): page.goto("index.html") page.set_viewport_size({"width": 1050, "height": 600}) menu_list = get_menu_titles(page) page.wait_for_load_state() for menu_item in menu_list: right_arrow = page.query_selector("//*[@id='relations-next']/a") if(right_arrow): page.click("//*[@id='relations-next']/a") page.wait_for_load_state() page_title = page.title().split(" — ")[0] assert page_title == menu_item if("toctree" in page.url): # check titles for all sub-toctree content # list_url = page.split("/")[3::] # new_url = "/".join(list_url) # test_check_titles(new_url) flag = False test_check_titles(page) else: break
def get_menu_titles(page) -> list: page.wait_for_load_state() menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a") return [title.as_element().inner_text() for title in menu_list] flag = True def test_check_titles(page): global flag if(flag): page.goto("index.html") menu_list = get_menu_titles(page) page.wait_for_load_state() for menu_item in menu_list: right_arrow = page.query_selector("//*[@id='relations-next']/a") if(right_arrow): page.click("//*[@id='relations-next']/a") page.wait_for_load_state() page_title = page.title().split(" — ")[0] assert page_title == menu_item if("toctree" in page.url): flag = False test_check_titles(page) else: break
Delete debug comments and tool
Delete debug comments and tool
Python
agpl-3.0
PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis
--- +++ @@ -12,7 +12,6 @@ global flag if(flag): page.goto("index.html") - page.set_viewport_size({"width": 1050, "height": 600}) menu_list = get_menu_titles(page) page.wait_for_load_state() for menu_item in menu_list: @@ -23,10 +22,6 @@ page_title = page.title().split(" — ")[0] assert page_title == menu_item if("toctree" in page.url): - # check titles for all sub-toctree content - # list_url = page.split("/")[3::] - # new_url = "/".join(list_url) - # test_check_titles(new_url) flag = False test_check_titles(page) else:
0a09dbb6cc0104c9e1d3e504f84a70f729d14af1
tests/unit/test_utils.py
tests/unit/test_utils.py
# -*- coding: utf-8 -*- """ radish ~~~~~~ Behavior Driven Development tool for Python - the root from red to green Copyright: MIT, Timo Furrer <tuxtimo@gmail.com> """ import pytest import radish.utils as utils @pytest.mark.parametrize('basedirs, expected_basedirs', [ (['foo', 'bar'], ['foo', 'bar']), (['foo:bar', 'foobar'], ['foo', 'bar', 'foobar']), (['foo:bar', 'foobar', 'one:two:three'], ['foo', 'bar', 'foobar', 'one', 'two', 'three']), (['foo:', ':bar'], ['foo', 'bar']) ]) def test_flattened_basedirs(basedirs, expected_basedirs): """ Test flatten basedirs """ # given & when actual_basedirs = utils.flattened_basedirs(basedirs) # then assert actual_basedirs == expected_basedirs
# -*- coding: utf-8 -*- """ radish ~~~~~~ Behavior Driven Development tool for Python - the root from red to green Copyright: MIT, Timo Furrer <tuxtimo@gmail.com> """ import pytest import radish.utils as utils @pytest.mark.parametrize('basedirs, expected_basedirs', [ (['foo', 'bar'], ['foo', 'bar']), (['foo:bar', 'foobar'], ['foo', 'bar', 'foobar']), (['foo:bar', 'foobar', 'one:two:three'], ['foo', 'bar', 'foobar', 'one', 'two', 'three']), (['foo:', ':bar'], ['foo', 'bar']) ]) def test_flattened_basedirs(basedirs, expected_basedirs): """ Test flatten basedirs """ # given & when actual_basedirs = utils.flattened_basedirs(basedirs) # then assert actual_basedirs == expected_basedirs def test_make_unique_obj_list(): """ Test filter list by propertyName """ object_list = [ type('SomeObjectClass', (object,), {'propertyName' : '1'}), type('SomeObjectClass', (object,), {'propertyName' : '2'}), type('SomeObjectClass', (object,), {'propertyName' : '1'}), ] value_list = utils.make_unique_obj_list(object_list, lambda x: x.propertyName) value_list = list(map(lambda x: x.propertyName, value_list)) value_list.sort() assert value_list == ['1', '2']
Add a test for utils.make_unique_obj_list
Add a test for utils.make_unique_obj_list
Python
mit
radish-bdd/radish,radish-bdd/radish
--- +++ @@ -30,3 +30,19 @@ # then assert actual_basedirs == expected_basedirs + + +def test_make_unique_obj_list(): + """ + Test filter list by propertyName + """ + object_list = [ type('SomeObjectClass', (object,), {'propertyName' : '1'}), + type('SomeObjectClass', (object,), {'propertyName' : '2'}), + type('SomeObjectClass', (object,), {'propertyName' : '1'}), + ] + + value_list = utils.make_unique_obj_list(object_list, lambda x: x.propertyName) + value_list = list(map(lambda x: x.propertyName, value_list)) + value_list.sort() + + assert value_list == ['1', '2']
a2c419296a9f42f3c0d70ad3b7d10eed3702bd85
download-and-process.py
download-and-process.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import wikidata # The data (= ID) of the Wikidata dump dump_id = '20150105' # The files to download download_urls = [ "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-terms.nt.gz" % dump_id, "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-properties.nt.gz" % dump_id, "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-simple-statements.nt.gz" % dump_id ] # Download and process the files for url in download_urls: filename = wikidata.download(url) filename = wikidata.english_please(filename) if 'properties' in filename: wikidata.simple_properties(filename)
#!/usr/bin/env python # -*- coding: utf-8 -*- import wikidata # The data (= ID) of the Wikidata dump dump_id = '20150126' # The files to download download_urls = [ "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-terms.nt.gz" % dump_id, "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-properties.nt.gz" % dump_id, "https://tools.wmflabs.org/wikidata-exports/rdf/exports/%s/wikidata-simple-statements.nt.gz" % dump_id ] # Download and process the files for url in download_urls: filename = wikidata.download(url) filename = wikidata.english_please(filename) if 'properties' in filename: wikidata.simple_properties(filename)
Update version of Wikidata dump to 20150126
Update version of Wikidata dump to 20150126
Python
mit
patrickhoefler/wikidata-virtuoso,patrickhoefler/wikidata-virtuoso
--- +++ @@ -5,7 +5,7 @@ # The data (= ID) of the Wikidata dump -dump_id = '20150105' +dump_id = '20150126' # The files to download download_urls = [
f693f09bfedc4981557741a8ac445c160faab65d
assisstant/main.py
assisstant/main.py
#!/usr/bin/env python3 import sys from PyQt5.QtWidgets import QApplication from keyboard.ui.widgets import KeyboardWindow if __name__ == '__main__': app = QApplication([]) window = KeyboardWindow() window.showMaximized() sys.exit(app.exec())
#!/usr/bin/env python3 import sys import signal from PyQt5.QtWidgets import QApplication from keyboard.ui.widgets import KeyboardWindow if __name__ == '__main__': signal.signal(signal.SIGINT, signal.SIG_DFL) app = QApplication([]) window = KeyboardWindow() window.showMaximized() sys.exit(app.exec())
Add signal handler to quit the application
Add signal handler to quit the application
Python
apache-2.0
brainbots/assistant
--- +++ @@ -1,9 +1,11 @@ #!/usr/bin/env python3 import sys +import signal from PyQt5.QtWidgets import QApplication from keyboard.ui.widgets import KeyboardWindow if __name__ == '__main__': + signal.signal(signal.SIGINT, signal.SIG_DFL) app = QApplication([]) window = KeyboardWindow() window.showMaximized()
015ba19ceefacd82e68aa7a023e33140e868f5a6
cybox/common/defined_object.py
cybox/common/defined_object.py
class DefinedObject(object): pass
from StringIO import StringIO class DefinedObject(object): def to_xml(self): """Export an object as an XML String""" s = StringIO() self.to_obj().export(s, 0) return s.getvalue()
Add utility method to DefinedObject base class
Add utility method to DefinedObject base class
Python
bsd-3-clause
CybOXProject/python-cybox
--- +++ @@ -1,2 +1,10 @@ +from StringIO import StringIO + class DefinedObject(object): - pass + + def to_xml(self): + """Export an object as an XML String""" + + s = StringIO() + self.to_obj().export(s, 0) + return s.getvalue()
b9671e96e40b38d0662dbe0e32dca0ca0c5fe62e
tensor2tensor/rl/trainer_model_based_test.py
tensor2tensor/rl/trainer_model_based_test.py
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tiny run of trainer_model_based. Smoke test.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensor2tensor.rl import trainer_model_based import tensorflow as tf FLAGS = tf.flags.FLAGS class ModelRLExperimentTest(tf.test.TestCase): def test_basic(self): FLAGS.output_dir = tf.test.get_temp_dir() FLAGS.loop_hparams_set = "rl_modelrl_tiny" FLAGS.schedule = "train" # skip evaluation for world model training trainer_model_based.main(None) if __name__ == "__main__": tf.test.main()
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tiny run of trainer_model_based. Smoke test.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import shutil from tensor2tensor.rl import trainer_model_based import tensorflow as tf FLAGS = tf.flags.FLAGS class ModelRLExperimentTest(tf.test.TestCase): def setUp(self): super(ModelRLExperimentTest, self).setUp() FLAGS.output_dir = tf.test.get_temp_dir() shutil.rmtree(FLAGS.output_dir) os.mkdir(FLAGS.output_dir) FLAGS.schedule = "train" # skip evaluation for world model training def test_basic(self): FLAGS.loop_hparams_set = "rl_modelrl_tiny" trainer_model_based.main(None) def test_ae(self): FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny" trainer_model_based.main(None) if __name__ == "__main__": tf.test.main()
Add a test for the AE experiment
Add a test for the AE experiment
Python
apache-2.0
tensorflow/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor
--- +++ @@ -17,6 +17,9 @@ from __future__ import division from __future__ import print_function +import os +import shutil + from tensor2tensor.rl import trainer_model_based import tensorflow as tf @@ -26,10 +29,19 @@ class ModelRLExperimentTest(tf.test.TestCase): + def setUp(self): + super(ModelRLExperimentTest, self).setUp() + FLAGS.output_dir = tf.test.get_temp_dir() + shutil.rmtree(FLAGS.output_dir) + os.mkdir(FLAGS.output_dir) + FLAGS.schedule = "train" # skip evaluation for world model training + def test_basic(self): - FLAGS.output_dir = tf.test.get_temp_dir() FLAGS.loop_hparams_set = "rl_modelrl_tiny" - FLAGS.schedule = "train" # skip evaluation for world model training + trainer_model_based.main(None) + + def test_ae(self): + FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny" trainer_model_based.main(None)
fff3e2ed2ef1bb3f87f31178ef03d6752f2dc152
salt/modules/cmd.py
salt/modules/cmd.py
''' Module for shelling out commands, inclusion of this module should be configurable for security reasons ''' def echo(text): ''' Return a string - used for testing the connection ''' return text
''' Module for shelling out commands, inclusion of this module should be configurable for security reasons ''' def echo(text): ''' Return a string - used for testing the connection ''' print 'Echo got called!' return text
Add a debugging line to the echo command
Add a debugging line to the echo command
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
--- +++ @@ -7,4 +7,5 @@ ''' Return a string - used for testing the connection ''' + print 'Echo got called!' return text
bee340782a07a26a28bf89d91798d8091c42d1e1
config.py
config.py
import os class Config(object): DEBUG = False TESTING = False SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG = False class StagingConfig(Config): DEBUG = True DEVELOPMENT = True class DevelopmentConfig(Config): DEBUG = True DEVELOPMENT = True class TestingConfig(Config): TESTING = True
import os class Config(object): DEBUG = False TESTING = False SQLALCHEMY_DATABASE_URI = os.environ["DB_URL"] SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG = False class StagingConfig(Config): DEBUG = True DEVELOPMENT = True class DevelopmentConfig(Config): DEBUG = True DEVELOPMENT = True class TestingConfig(Config): TESTING = True
Use DB_URL set in heroku to avoid sqlalchemy dialect issue
Use DB_URL set in heroku to avoid sqlalchemy dialect issue
Python
mit
mdsrosa/routes_api_python
--- +++ @@ -4,7 +4,7 @@ class Config(object): DEBUG = False TESTING = False - SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] + SQLALCHEMY_DATABASE_URI = os.environ["DB_URL"] SQLALCHEMY_TRACK_MODIFICATIONS = True
2d8efdb0c5628f6262e7efd2d8a20369415982bb
teknologr/registration/views.py
teknologr/registration/views.py
from django.shortcuts import render, redirect from django.conf import settings from django.http import HttpResponseRedirect from django.views import View from members.programmes import DEGREE_PROGRAMME_CHOICES from registration.forms import RegistrationForm from registration.models import LimboMember class BaseView(View): context = {'DEBUG': settings.DEBUG} class HomeView(BaseView): template = 'registration.html' def get(self, request): self.context['programmes'] = DEGREE_PROGRAMME_CHOICES self.context['form'] = RegistrationForm() return render(request, self.template, self.context) class SubmitView(BaseView): template = 'submit.view' """ def get(self, request): applicant = LimboMember() """ def post(self, request): # TODO: add context form = RegistrationForm(request.POST) if form.is_valid(): registration = form.instance registration.save() else: self.context['form'] = form return render(request, 'registration.html', self.context, status=400) render(request, 'submit.html')
from django.shortcuts import render, redirect from django.conf import settings from django.http import HttpResponseRedirect from django.views import View from members.programmes import DEGREE_PROGRAMME_CHOICES from registration.forms import RegistrationForm from registration.models import LimboMember class BaseView(View): context = {'DEBUG': settings.DEBUG} class HomeView(BaseView): template = 'registration.html' def get(self, request): self.context['programmes'] = DEGREE_PROGRAMME_CHOICES self.context['form'] = RegistrationForm() return render(request, self.template, self.context) class SubmitView(BaseView): template = 'submit.html' def post(self, request): form = RegistrationForm(request.POST) if form.is_valid(): # TODO: send mail to user as affirmation (check `api/mailutils.py`) registration = form.instance registration.save() else: self.context['form'] = form return render(request, HomeView.template, self.context, status=400) return render(request, self.template, self.context)
Fix submit view in registration
Fix submit view in registration
Python
mit
Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io
--- +++ @@ -21,21 +21,16 @@ class SubmitView(BaseView): - template = 'submit.view' - - """ - def get(self, request): - applicant = LimboMember() - """ + template = 'submit.html' def post(self, request): - # TODO: add context form = RegistrationForm(request.POST) if form.is_valid(): + # TODO: send mail to user as affirmation (check `api/mailutils.py`) registration = form.instance registration.save() else: self.context['form'] = form - return render(request, 'registration.html', self.context, status=400) + return render(request, HomeView.template, self.context, status=400) - render(request, 'submit.html') + return render(request, self.template, self.context)
a5338e46ffd0684b2d4f21708176cf6c8bbdcc92
tests/changes/api/test_author_build_index.py
tests/changes/api/test_author_build_index.py
from uuid import uuid4 from changes.config import db from changes.models import Author from changes.testutils import APITestCase class AuthorBuildListTest(APITestCase): def test_simple(self): fake_author_id = uuid4() self.create_build(self.project) path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex) resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 0 author = Author(email=self.default_user.email, name='Foo Bar') db.session.add(author) build = self.create_build(self.project, author=author) path = '/api/0/authors/{0}/builds/'.format(author.id.hex) resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 1 assert data[0]['id'] == build.id.hex self.login(self.default_user) path = '/api/0/authors/me/builds/' resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 1 assert data[0]['id'] == build.id.hex
from uuid import uuid4 from changes.config import db from changes.models import Author from changes.testutils import APITestCase class AuthorBuildListTest(APITestCase): def test_simple(self): fake_author_id = uuid4() self.create_build(self.project) path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex) resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 0 author = Author(email=self.default_user.email, name='Foo Bar') db.session.add(author) build = self.create_build(self.project, author=author) path = '/api/0/authors/{0}/builds/'.format(author.id.hex) resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 1 assert data[0]['id'] == build.id.hex path = '/api/0/authors/me/builds/' resp = self.client.get(path) assert resp.status_code == 401 self.login(self.default_user) path = '/api/0/authors/me/builds/' resp = self.client.get(path) assert resp.status_code == 200 data = self.unserialize(resp) assert len(data) == 1 assert data[0]['id'] == build.id.hex
Test self request without authentication
Test self request without authentication
Python
apache-2.0
wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes
--- +++ @@ -30,6 +30,11 @@ assert len(data) == 1 assert data[0]['id'] == build.id.hex + path = '/api/0/authors/me/builds/' + + resp = self.client.get(path) + assert resp.status_code == 401 + self.login(self.default_user) path = '/api/0/authors/me/builds/'
8c9a8e715643ac76181a9c05b6dd0afbcd9802a9
config.py
config.py
import os # # This is the configuration file of the application # # Please make sure you don't store here any secret information and use environment # variables # SQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL') SQLALCHEMY_POOL_RECYCLE = 60 SECRET_KEY = 'aiosdjsaodjoidjioewnioewfnoeijfoisdjf' FACEBOOK_KEY = "157028231131213" FACEBOOK_SECRET = "0437ee70207dca46609219b990be0614"
import os # # This is the configuration file of the application # # Please make sure you don't store here any secret information and use environment # variables # SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') SQLALCHEMY_POOL_RECYCLE = 60 SECRET_KEY = 'aiosdjsaodjoidjioewnioewfnoeijfoisdjf' FACEBOOK_KEY = "157028231131213" FACEBOOK_SECRET = "0437ee70207dca46609219b990be0614"
Use DATABASE_URL instead of CLEARDB_DATABASE_URL
Use DATABASE_URL instead of CLEARDB_DATABASE_URL For Postgres this is what we need, because this is what the following command sets: $ heroku pg:promote HEROKU_POSTGRESQL_BLUE_URL Promoting HEROKU_POSTGRESQL_BLUE_URL to DATABASE_URL... done
Python
bsd-3-clause
HamutalCohen3/anyway,boazin/anyway,boazin/anyway,yosinv/anyway,idogi/anyway,hasadna/anyway,omerxx/anyway,yosinv/anyway,njenia/anyway,OmerSchechter/anyway,OmerSchechter/anyway,idogi/anyway,HamutalCohen3/anyway,njenia/anyway,esegal/anyway,eladkarakuli/anyway,HamutalCohen3/anyway,hasadna/anyway,eladkarakuli/anyway,boazin/anyway,njenia/anyway,hasadna/anyway,omerxx/anyway,omerxx/anyway,hasadna/anyway,esegal/anyway,esegal/anyway,OmerSchechter/anyway,eladkarakuli/anyway,yosinv/anyway,idogi/anyway
--- +++ @@ -8,7 +8,7 @@ # -SQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL') +SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') SQLALCHEMY_POOL_RECYCLE = 60
a7535e3abc791bc3b13788fa321513a4a840f5a8
rnacentral/nhmmer/settings.py
rnacentral/nhmmer/settings.py
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # minimum query sequence length MIN_LENGTH = 11 # maximum query sequence length MAX_LENGTH = 10000 # Redis results expiration time EXPIRATION = 60*60*24*7 # seconds # maximum time to run nhmmer MAX_RUN_TIME = 60*60 # seconds # full path to query files QUERY_DIR = '' # full path to results files RESULTS_DIR = '' # full path to nhmmer executable NHMMER_EXECUTABLE = '' # full path to sequence database SEQDATABASE = ''
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # minimum query sequence length MIN_LENGTH = 10 # maximum query sequence length MAX_LENGTH = 10000 # Redis results expiration time EXPIRATION = 60*60*24*7 # seconds # maximum time to run nhmmer MAX_RUN_TIME = 60*60 # seconds # full path to query files QUERY_DIR = '' # full path to results files RESULTS_DIR = '' # full path to nhmmer executable NHMMER_EXECUTABLE = '' # full path to sequence database SEQDATABASE = ''
Set minimum query length to 10 nts
Set minimum query length to 10 nts The shortest length that the nhmmer alphabet guesser will work on is 10.
Python
apache-2.0
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
--- +++ @@ -12,7 +12,7 @@ """ # minimum query sequence length -MIN_LENGTH = 11 +MIN_LENGTH = 10 # maximum query sequence length MAX_LENGTH = 10000
594869bca15b93a8e583e29b1a65496bd6c058f2
config.py
config.py
import os basedir = os.path.abspath(os.path.dirname(__file__)) class Config: SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd' SQLALCHEMY_COMMIT_ON_TEARDOWN = True @staticmethod def init_app(app): pass class DevelopmentConfig(Config): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'weather.sqlite') config = { 'default': DevelopmentConfig }
import os basedir = os.path.abspath(os.path.dirname(__file__)) class Config: SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd' SQLALCHEMY_COMMIT_ON_TEARDOWN = True SQLALCHEMY_TRACK_MODIFICATIONS = False @staticmethod def init_app(app): pass class DevelopmentConfig(Config): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'weather.sqlite') config = { 'default': DevelopmentConfig }
Add SQLALCHEMY_TRACK_MODIFICATIONS to suppress warnings
Add SQLALCHEMY_TRACK_MODIFICATIONS to suppress warnings
Python
mit
fernando24164/flask_api,fernando24164/flask_api
--- +++ @@ -5,6 +5,7 @@ class Config: SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd' SQLALCHEMY_COMMIT_ON_TEARDOWN = True + SQLALCHEMY_TRACK_MODIFICATIONS = False @staticmethod def init_app(app):
b4aae8d7f87bd3f1bb27610440c20ab1110d2b3a
dbaas/util/update_instances_with_offering.py
dbaas/util/update_instances_with_offering.py
# coding: utf-8 class UpdateInstances(object): @staticmethod def do(): from dbaas_cloudstack.models import DatabaseInfraOffering from dbaas_cloudstack.models import PlanAttr infra_offerings = DatabaseInfraOffering.objects.all() for infra_offering in infra_offerings: plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan) strong_offering = infra_offering.offering weaker_offering = plan_attr.get_weaker_offering() for instance in infra_offering.databaseinfra.instances.all(): if instance.is_database: instance.offering = weaker_offering else: instance.oferring = strong_offering instance.save()
# coding: utf-8 class UpdateInstances(object): @staticmethod def do(): from dbaas_cloudstack.models import DatabaseInfraOffering from dbaas_cloudstack.models import PlanAttr infra_offerings = DatabaseInfraOffering.objects.all() for infra_offering in infra_offerings: plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan) strong_offering = infra_offering.offering weaker_offering = plan_attr.get_weaker_offering() for instance in infra_offering.databaseinfra.instances.all(): if instance.is_database: instance.offering = strong_offering else: instance.oferring = weaker_offering instance.save()
Fix script to update offering on instances
Fix script to update offering on instances
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
--- +++ @@ -17,8 +17,8 @@ for instance in infra_offering.databaseinfra.instances.all(): if instance.is_database: - instance.offering = weaker_offering + instance.offering = strong_offering else: - instance.oferring = strong_offering + instance.oferring = weaker_offering instance.save()
a18ae589f8217bc26bf1d4a8841c637354aedbaa
ispmgr/wwwdomain.py
ispmgr/wwwdomain.py
import json import api class WWWDomain(api.API): def __init__(self, auth_handler): self.url = auth_handler.url self.sessid = auth_handler.sessid self.func = 'wwwdomain.edit' self.out = 'json' self.params = { 'auth' : self.sessid, 'out' : self.out, 'func' : self.func, } def list(self, domain=None): """List all www domains. If domains is used, list details about this one.""" if domain: self.params['elid'] = domain else: self.params['func'] = 'wwwdomain' data = self.process_api(self.url, self.params) out = json.load(data) try: return out['elem'] except KeyError: return out def add(self, domain='', owner='', admin='', ip='', **kwargs): """Add a new wwwdomain to configuration. If a DNS server is configurated, API adds domain there too.""" self.params['sok'] = 'yes' self.params['domain'] = domain self.params['owner'] = owner self.params['admin'] = admin self.params['ip'] = ip for key in kwargs: self.params[key] = kwargs[key] data = self.process_api(self.url, self.params) out = json.load(data) return out
import json import api class WWWDomain(api.API): def __init__(self, auth_handler): self.url = auth_handler.url self.sessid = auth_handler.sessid self.func = 'wwwdomain.edit' self.out = 'json' self._clear_params() def _clear_params(self): try: self.params.clear() except NameError: pass self.params = { 'auth' : self.sessid, 'out' : 'json', 'func' : self.func, } def list(self, domain=None): """List all www domains. If domains is used, list details about this one.""" self._clear_params() if domain: self.params['elid'] = domain else: self.params['func'] = 'wwwdomain' data = self.process_api(self.url, self.params) out = json.load(data) try: return out['elem'] except KeyError: return out def add(self, domain='', owner='', admin='', ip='', **kwargs): """Add a new wwwdomain to configuration. If a DNS server is configurated, API adds domain there too.""" self._clear_params() self.params['sok'] = 'yes' self.params['domain'] = domain self.params['owner'] = owner self.params['admin'] = admin self.params['ip'] = ip for key in kwargs: self.params[key] = kwargs[key] data = self.process_api(self.url, self.params) out = json.load(data) return out
Clear parameters before editing/adding. Previously it was impossible to call two functions in sequence.
Clear parameters before editing/adding. Previously it was impossible to call two functions in sequence.
Python
mit
jakubjedelsky/python-ispmgr
--- +++ @@ -8,14 +8,22 @@ self.sessid = auth_handler.sessid self.func = 'wwwdomain.edit' self.out = 'json' + self._clear_params() + + def _clear_params(self): + try: + self.params.clear() + except NameError: + pass self.params = { 'auth' : self.sessid, - 'out' : self.out, + 'out' : 'json', 'func' : self.func, } def list(self, domain=None): """List all www domains. If domains is used, list details about this one.""" + self._clear_params() if domain: self.params['elid'] = domain else: @@ -30,6 +38,7 @@ def add(self, domain='', owner='', admin='', ip='', **kwargs): """Add a new wwwdomain to configuration. If a DNS server is configurated, API adds domain there too.""" + self._clear_params() self.params['sok'] = 'yes' self.params['domain'] = domain self.params['owner'] = owner
c0a341bb285e9906747c1f872e3b022a3a491044
falmer/events/filters.py
falmer/events/filters.py
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
Add type filter by slug
Add type filter by slug
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
--- +++ @@ -33,6 +33,7 @@ brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') + type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte')
a7f761d662b98dea2b16f711e7d17ad826f491af
onestop/test_gtfs.py
onestop/test_gtfs.py
"""geohash unit tests.""" import unittest import os import json import gtfs class TestGTFSReader(unittest.TestCase): test_gtfs = os.path.join('examples', 'sample-feed.zip') def test_readcsv(self): expect = { 'stop_lat': '36.425288', 'zone_id': '', 'stop_lon': '-117.133162', 'stop_url': '', 'stop_id': 'FUR_CREEK_RES', 'stop_desc': '', 'stop_name': 'Furnace Creek Resort (Demo)' } f = gtfs.GTFSReader(self.test_gtfs) stops = f.readcsv('stops.txt') found = filter(lambda x:x['stop_id'] == expect['stop_id'], stops)[0] for k in expect: assert expect[k] == found[k] def test_stops_centroid(self): f = gtfs.GTFSReader(self.test_gtfs) centroid = f.stops_centroid() expect = (-116.7720483, 36.8196683) self.assertAlmostEqual(centroid[0], expect[0]) self.assertAlmostEqual(centroid[1], expect[1]) def test_stops_geohash(self): f = gtfs.GTFSReader(self.test_gtfs) g = f.stops_geohash() assert g == '9qs' if __name__ == '__main__': unittest.main()
"""geohash unit tests.""" import unittest import os import json import gtfs class TestGTFSReader(unittest.TestCase): test_gtfs = os.path.join('examples', 'sample-feed.zip') def test_readcsv(self): expect = { 'stop_lat': '36.425288', 'zone_id': '', 'stop_lon': '-117.133162', 'stop_url': '', 'stop_id': 'FUR_CREEK_RES', 'stop_desc': '', 'stop_name': 'Furnace Creek Resort (Demo)' } f = gtfs.GTFSReader(self.test_gtfs) stops = f.readcsv('stops.txt') found = filter(lambda x:x['stop_id'] == expect['stop_id'], stops)[0] for k in expect: assert expect[k] == found[k] if __name__ == '__main__': unittest.main()
Remove this test for now
Remove this test for now
Python
mit
srthurman/transitland-python-client,transitland/transitland-python-client
--- +++ @@ -24,17 +24,5 @@ for k in expect: assert expect[k] == found[k] - def test_stops_centroid(self): - f = gtfs.GTFSReader(self.test_gtfs) - centroid = f.stops_centroid() - expect = (-116.7720483, 36.8196683) - self.assertAlmostEqual(centroid[0], expect[0]) - self.assertAlmostEqual(centroid[1], expect[1]) - - def test_stops_geohash(self): - f = gtfs.GTFSReader(self.test_gtfs) - g = f.stops_geohash() - assert g == '9qs' - if __name__ == '__main__': unittest.main()
dd58dbbbdb9b3a9479fa5db38a4e4038a6514fef
configReader.py
configReader.py
class ConfigReader(): def __init__(self): self.keys={} #Read Keys from file def readKeys(self): keysFile=open("config.txt","r") fileLines=keysFile.readlines() keysFile.close() self.keys.clear() for item in fileLines: #If last char is \n if (item[-1]=='\n'): item=item[:-1] #If a commented line if (item[0]=='#'): pass #If a new line is the first char elif (item[0]=='\n'): pass else: #Get Position of equal sign pos=item.index('=') #Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end) self.keys[item[0:pos]]=item[pos+1:] #Return the keys def getKeys(self): return self.keys
class ConfigReader(): def __init__(self): self.keys={} #Read Keys from file def readKeys(self): keysFile=open("config.txt","r") fileLines=keysFile.readlines() keysFile.close() self.keys.clear() for item in fileLines: #If last char is \n if (item[-1]=='\n'): item=item[:-1] #If a commented line if (item[0]=='#'): continue #If a new line is the first char elif (item[0]=='\n'): continue else: #Get Position of equal sign pos=item.index('=') #Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end) self.keys[item[0:pos]]=item[pos+1:] #Return the keys def getKeys(self): return self.keys
Change 'pass' statements to 'continue' statements.
Change 'pass' statements to 'continue' statements.
Python
mit
ollien/PyConfigReader
--- +++ @@ -14,10 +14,10 @@ item=item[:-1] #If a commented line if (item[0]=='#'): - pass + continue #If a new line is the first char elif (item[0]=='\n'): - pass + continue else: #Get Position of equal sign pos=item.index('=')
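A small standalone illustration of pass versus continue in a loop. In the if/elif/else above both spellings happen to behave the same, because the parsing code lives in the else branch, but continue states the intent explicitly and matters as soon as any code follows the conditional.

lines = ["#comment", "\n", "key=value"]

kept_with_pass = []
for item in lines:
    if item[0] == '#':
        pass  # no-op: execution continues with the code below
    kept_with_pass.append(item)

kept_with_continue = []
for item in lines:
    if item[0] == '#':
        continue  # jump straight to the next iteration
    kept_with_continue.append(item)

print(kept_with_pass)      # ['#comment', '\n', 'key=value']
print(kept_with_continue)  # ['\n', 'key=value']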
a8966a4d3f9a160af3865b8cadb26e58eb36fd64
src/database/__init__.py
src/database/__init__.py
from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker session = None def init_session(connection_string=None, drop=False): if connection_string is None: connection_string = 'sqlite://' from database.model import Base global session if drop: try: old_session = session Base.metadata.drop_all(bind=old_session.bind) except: pass engine = create_engine(connection_string, echo=True) db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) Base.metadata.create_all(bind=engine) session = db_session
from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.pool import StaticPool session = None def init_session(connection_string=None, drop=False): if connection_string is None: engine = create_engine('sqlite://', echo=True, connect_args={'check_same_thread':False}, poolclass=StaticPool) else: engine = create_engine(connection_string) from database.model import Base global session if drop: try: old_session = session Base.metadata.drop_all(bind=old_session.bind) except: pass db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) Base.metadata.create_all(bind=engine) session = db_session
Fix the database session init to work with the flask debug server.
Fix the database session init to work with the flask debug server. The debug webserver consists of two parts: the watcher that watches the files for changes and the worker that is forked and will be restarted after each modification. SQLAlchemy uses a SingletonPool that will not work with this if the database was initialized within the watcher. See [1] for more detailed information. [1] http://docs.sqlalchemy.org/en/rel_0_8/dialects/sqlite.html#threading-pooling-behavior
Python
bsd-3-clause
janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
--- +++ @@ -1,5 +1,6 @@ from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker +from sqlalchemy.pool import StaticPool session = None @@ -7,7 +8,12 @@ def init_session(connection_string=None, drop=False): if connection_string is None: - connection_string = 'sqlite://' + engine = create_engine('sqlite://', + echo=True, + connect_args={'check_same_thread':False}, + poolclass=StaticPool) + else: + engine = create_engine(connection_string) from database.model import Base @@ -20,7 +26,6 @@ except: pass - engine = create_engine(connection_string, echo=True) db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
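A minimal, self-contained sketch of the pattern the commit above adopts: an in-memory SQLite engine shared across threads through StaticPool, so every checkout reuses the single underlying connection and the :memory: database is not lost between uses. The table name below is made up for illustration.

from sqlalchemy import create_engine, text
from sqlalchemy.pool import StaticPool

engine = create_engine(
    'sqlite://',
    connect_args={'check_same_thread': False},
    poolclass=StaticPool,
)

with engine.begin() as conn:
    conn.execute(text('CREATE TABLE demo (id INTEGER PRIMARY KEY)'))
    conn.execute(text('INSERT INTO demo (id) VALUES (1)'))

# A second checkout still sees the table, because the pool hands back the
# same SQLite connection instead of opening a fresh, empty :memory: database.
with engine.connect() as conn:
    print(conn.execute(text('SELECT count(*) FROM demo')).scalar())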
0dc3e4ffe86f25697799b8092822a8d77a22493b
pi_mqtt_gpio/__init__.py
pi_mqtt_gpio/__init__.py
import sys print("FATAL ERROR: The file at pi_mqtt_gpio/__init__.py should be replaced us" "ing 'make schema' before packaging.") sys.exit(1)
import yaml CONFIG_SCHEMA = yaml.load(""" mqtt: type: dict required: yes schema: host: type: string empty: no required: no default: localhost port: type: integer min: 1 max: 65535 required: no default: 1883 user: type: string required: no default: "" password: type: string required: no default: "" client_id: type: string required: no default: "" topic_prefix: type: string required: no default: "" coerce: rstrip_slash protocol: type: string required: no empty: no coerce: tostring default: "3.1.1" allowed: - "3.1" - "3.1.1" status_topic: type: string required: no default: status status_payload_running: type: string required: no default: running status_payload_stopped: type: string required: no default: stopped status_payload_dead: type: string required: no default: dead gpio_modules: type: list required: yes schema: type: dict allow_unknown: yes schema: name: type: string required: yes empty: no module: type: string required: yes empty: no cleanup: type: boolean required: no default: yes digital_inputs: type: list required: no default: [] schema: type: dict schema: name: type: string required: yes empty: no module: type: string required: yes empty: no pin: type: integer required: yes min: 0 on_payload: type: string required: yes empty: no off_payload: type: string required: yes empty: no pullup: type: boolean required: no default: no pulldown: type: boolean required: no default: no retain: type: boolean required: no default: no digital_outputs: type: list required: no default: [] schema: type: dict schema: name: type: string required: yes module: type: string required: yes pin: type: integer required: yes min: 0 on_payload: type: string required: no empty: no off_payload: type: string required: no empty: no inverted: type: boolean required: no default: no initial: type: string required: no allowed: - high - low retain: type: boolean required: no default: no """)
Add schema to repo for now
Add schema to repo for now
Python
mit
flyte/pi-mqtt-gpio
--- +++ @@ -1,4 +1,163 @@ -import sys -print("FATAL ERROR: The file at pi_mqtt_gpio/__init__.py should be replaced us" - "ing 'make schema' before packaging.") -sys.exit(1) +import yaml + +CONFIG_SCHEMA = yaml.load(""" +mqtt: + type: dict + required: yes + schema: + host: + type: string + empty: no + required: no + default: localhost + port: + type: integer + min: 1 + max: 65535 + required: no + default: 1883 + user: + type: string + required: no + default: "" + password: + type: string + required: no + default: "" + client_id: + type: string + required: no + default: "" + topic_prefix: + type: string + required: no + default: "" + coerce: rstrip_slash + protocol: + type: string + required: no + empty: no + coerce: tostring + default: "3.1.1" + allowed: + - "3.1" + - "3.1.1" + status_topic: + type: string + required: no + default: status + status_payload_running: + type: string + required: no + default: running + status_payload_stopped: + type: string + required: no + default: stopped + status_payload_dead: + type: string + required: no + default: dead + +gpio_modules: + type: list + required: yes + schema: + type: dict + allow_unknown: yes + schema: + name: + type: string + required: yes + empty: no + module: + type: string + required: yes + empty: no + cleanup: + type: boolean + required: no + default: yes + +digital_inputs: + type: list + required: no + default: [] + schema: + type: dict + schema: + name: + type: string + required: yes + empty: no + module: + type: string + required: yes + empty: no + pin: + type: integer + required: yes + min: 0 + on_payload: + type: string + required: yes + empty: no + off_payload: + type: string + required: yes + empty: no + pullup: + type: boolean + required: no + default: no + pulldown: + type: boolean + required: no + default: no + retain: + type: boolean + required: no + default: no + +digital_outputs: + type: list + required: no + default: [] + schema: + type: dict + schema: + name: + type: string + required: yes + module: + type: string + required: yes + pin: + type: integer + required: yes + min: 0 + on_payload: + type: string + required: no + empty: no + off_payload: + type: string + required: no + empty: no + inverted: + type: boolean + required: no + default: no + initial: + type: string + required: no + allowed: + - high + - low + retain: + type: boolean + required: no + default: no + + +""")
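The CONFIG_SCHEMA above is written in Cerberus-style rules (type, required, default, allowed, coerce). Assuming the project validates its YAML configuration with the cerberus package, applying such a schema typically looks like the sketch below; the tiny schema and config are simplified stand-ins, and the custom coercers (rstrip_slash, tostring) would additionally need to be registered on a Validator subclass.

import yaml
from cerberus import Validator

# Simplified stand-in schema; the real CONFIG_SCHEMA is much larger.
schema = yaml.safe_load("""
mqtt:
  type: dict
  required: yes
  schema:
    host:
      type: string
      required: no
      default: localhost
    port:
      type: integer
      min: 1
      max: 65535
      default: 1883
""")

config = yaml.safe_load("""
mqtt:
  port: 1884
""")

v = Validator(schema)
if v.validate(config):
    print(v.normalized(config))  # defaults such as host are filled in
else:
    print(v.errors)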
d0c284139fe475a62fa53cde7e3e20cf2cc2d977
plugins/FileHandlers/STLWriter/__init__.py
plugins/FileHandlers/STLWriter/__init__.py
from . import STLWriter def getMetaData(): return { 'type': 'mesh_writer', 'plugin': { "name": "STL Writer" } } def register(app): return STLWriter.STLWriter()
from . import STLWriter def getMetaData(): return { 'type': 'mesh_writer', 'plugin': { "name": "STL Writer" }, 'mesh_writer': { 'extension': 'stl', 'description': 'STL File' } } def register(app): return STLWriter.STLWriter()
Add writer metadata to the STL writer plugin so it can be used in Cura
Add writer metadata to the STL writer plugin so it can be used in Cura
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
--- +++ @@ -4,6 +4,10 @@ 'type': 'mesh_writer', 'plugin': { "name": "STL Writer" + }, + 'mesh_writer': { + 'extension': 'stl', + 'description': 'STL File' } }
6f0a35372d625f923b9093194540cf0b0e9f054d
platformio_api/__init__.py
platformio_api/__init__.py
# Copyright (C) Ivan Kravets <me@ikravets.com> # See LICENSE for details. import json import logging.config import os from time import tzset VERSION = (0, 3, 0) __version__ = ".".join([str(s) for s in VERSION]) __title__ = "platformio-api" __description__ = ("An API for PlatformIO") __url__ = "https://github.com/ivankravets/platformio-api" __author__ = "Ivan Kravets" __email__ = "me@ikravets.com" __license__ = "MIT License" __copyright__ = "Copyright (C) 2014-2015 Ivan Kravets" config = dict( SQLALCHEMY_DATABASE_URI=None, GITHUB_LOGIN=None, GITHUB_PASSWORD=None, DL_PIO_DIR=None, DL_PIO_URL=None, MAX_DLFILE_SIZE=1024*1024*10, LOGGING=dict(version=1) ) assert "PIOAPI_CONFIG_PATH" in os.environ with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f: config.update(json.load(f)) # configure logging for packages logging.basicConfig() logging.config.dictConfig(config['LOGGING']) # setup time zone to UTC globally os.environ['TZ'] = "+00:00" tzset()
# Copyright (C) Ivan Kravets <me@ikravets.com> # See LICENSE for details. import json import logging.config import os from time import tzset VERSION = (0, 3, 0) __version__ = ".".join([str(s) for s in VERSION]) __title__ = "platformio-api" __description__ = ("An API for PlatformIO") __url__ = "https://github.com/ivankravets/platformio-api" __author__ = "Ivan Kravets" __email__ = "me@ikravets.com" __license__ = "MIT License" __copyright__ = "Copyright (C) 2014-2015 Ivan Kravets" config = dict( SQLALCHEMY_DATABASE_URI=None, GITHUB_LOGIN=None, GITHUB_PASSWORD=None, DL_PIO_DIR=None, DL_PIO_URL=None, MAX_DLFILE_SIZE=1024*1024*20, # 20 Mb LOGGING=dict(version=1) ) assert "PIOAPI_CONFIG_PATH" in os.environ with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f: config.update(json.load(f)) # configure logging for packages logging.basicConfig() logging.config.dictConfig(config['LOGGING']) # setup time zone to UTC globally os.environ['TZ'] = "+00:00" tzset()
Increase repo size to 20Mb
Increase repo size to 20Mb
Python
apache-2.0
orgkhnargh/platformio-api,platformio/platformio-api
--- +++ @@ -26,7 +26,7 @@ GITHUB_PASSWORD=None, DL_PIO_DIR=None, DL_PIO_URL=None, - MAX_DLFILE_SIZE=1024*1024*10, + MAX_DLFILE_SIZE=1024*1024*20, # 20 Mb LOGGING=dict(version=1) )
72b899fd0ae8bd07edf454d410e65ff00a9ca772
generic_links/models.py
generic_links/models.py
# -*- coding: UTF-8 -*- from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.translation import ugettext_lazy as _ class GenericLink(models.Model): """ Relates an object with an url and its data """ content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField(db_index=True) content_object = generic.GenericForeignKey() url = models.URLField() title = models.CharField(max_length=200) description = models.TextField(max_length=1000, null=True, blank=True) user = models.ForeignKey("auth.User", null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True, db_index=True) is_external = models.BooleanField(default=True, db_index=True) class Meta: ordering = ("-created_at", ) verbose_name = _("Generic Link") verbose_name_plural = _("Generic Links") def __unicode__(self): return self.url
# -*- coding: UTF-8 -*- from django import VERSION from django.conf import settings from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.translation import ugettext_lazy as _ def get_user_model_fk_ref(): """Get user model depending on Django version.""" ver = VERSION if ver[0] >= 1 and ver[1] >= 5: return settings.AUTH_USER_MODEL else: return 'auth.User' class GenericLink(models.Model): """ Relates an object with an url and its data """ content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField(db_index=True) content_object = generic.GenericForeignKey() url = models.URLField() title = models.CharField(max_length=200) description = models.TextField(max_length=1000, null=True, blank=True) user = models.ForeignKey(get_user_model_fk_ref(), null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True, db_index=True) is_external = models.BooleanField(default=True, db_index=True) class Meta: ordering = ("-created_at", ) verbose_name = _("Generic Link") verbose_name_plural = _("Generic Links") def __unicode__(self): return self.url
Update for custom user model support
Update for custom user model support
Python
bsd-3-clause
matagus/django-generic-links,matagus/django-generic-links
--- +++ @@ -1,8 +1,20 @@ # -*- coding: UTF-8 -*- +from django import VERSION +from django.conf import settings from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.translation import ugettext_lazy as _ + + +def get_user_model_fk_ref(): + """Get user model depending on Django version.""" + ver = VERSION + + if ver[0] >= 1 and ver[1] >= 5: + return settings.AUTH_USER_MODEL + else: + return 'auth.User' class GenericLink(models.Model): @@ -18,7 +30,7 @@ title = models.CharField(max_length=200) description = models.TextField(max_length=1000, null=True, blank=True) - user = models.ForeignKey("auth.User", null=True, blank=True) + user = models.ForeignKey(get_user_model_fk_ref(), null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True, db_index=True) is_external = models.BooleanField(default=True, db_index=True)
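A short sketch of what the version check above guards against: since Django 1.5 a swappable user model is referenced through settings.AUTH_USER_MODEL (a string such as 'accounts.Member'), while older releases only knew 'auth.User'. The model below is purely illustrative and would live in an installed app of a configured project, not in this library.

from django.conf import settings
from django.db import models


class Bookmark(models.Model):
    # Resolves to the project's custom user model when AUTH_USER_MODEL is
    # set, and to django.contrib.auth.models.User otherwise.
    # (on_delete is only required from Django 2.0 onwards.)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True,
                             on_delete=models.SET_NULL)
    url = models.URLField()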
72fe214ba0bb66e5f5c4ca9670c14468304bdd49
winthrop/people/views.py
winthrop/people/views.py
from django.http import JsonResponse from dal import autocomplete from .models import Person from winthrop.books.models import PersonBook from django.db.models import BooleanField, Case, When, Value from .viaf import ViafAPI class ViafAutoSuggest(autocomplete.Select2ListView): """ View to provide VIAF suggestions for autocomplete info""" def get(self, request, *args, **kwargs): """Return JSON with suggested VIAF ids and display names.""" viaf = ViafAPI() result = viaf.suggest(self.q) # Strip names that are not personal for item in result: if item['nametype'] is not 'personal': del item return JsonResponse({ 'results': [dict( id=viaf.uri_from_id(item['viafid']), text=(item['displayForm']), ) for item in result], }) class PersonAutocomplete(autocomplete.Select2QuerySetView): '''Basic person autocomplete lookup, for use with django-autocomplete-light. Restricted to staff only. Also includes optional winthrop query string that sets whether to prioritize Winthrops based on search criteria in function. Can be extended by Q objects if necesary for more complex searches.''' # NOTE staff restrection applied in url config def get_queryset(self): winthrop_only = self.request.GET.get('winthrop', None) people = Person.objects.filter(authorized_name__icontains=self.q) if winthrop_only: people = people.filter(personbook__isnull=False) return people
from django.http import JsonResponse from dal import autocomplete from .models import Person from winthrop.books.models import PersonBook from django.db.models import BooleanField, Case, When, Value from .viaf import ViafAPI class ViafAutoSuggest(autocomplete.Select2ListView): """ View to provide VIAF suggestions for autocomplete info""" def get(self, request, *args, **kwargs): """Return JSON with suggested VIAF ids and display names.""" viaf = ViafAPI() result = viaf.suggest(self.q) # Strip names that are not personal for item in result: if item['nametype'] is not 'personal': del item return JsonResponse({ 'results': [dict( id=viaf.uri_from_id(item['viafid']), text=(item['displayForm']), ) for item in result], }) class PersonAutocomplete(autocomplete.Select2QuerySetView): '''Basic person autocomplete lookup, for use with django-autocomplete-light. Restricted to staff only. Also includes optional winthrop query string that sets whether to prioritize Winthrops based on search criteria in function. Can be extended by Q objects if necesary for more complex searches.''' # NOTE staff restrection applied in url config def get_queryset(self): winthrop_only = self.request.GET.get('winthrop', None) people = Person.objects.filter(authorized_name__icontains=self.q) if winthrop_only: people = people.filter(personbook__isnull=False) return people
Fix people autosuggest with winthrop_only flag
Fix people autosuggest with winthrop_only flag
Python
apache-2.0
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
--- +++ @@ -36,6 +36,6 @@ def get_queryset(self): winthrop_only = self.request.GET.get('winthrop', None) people = Person.objects.filter(authorized_name__icontains=self.q) - if winthrop_only: + if winthrop_only: people = people.filter(personbook__isnull=False) return people
76bb7b55b175a6495d0db2881f8d94f03708522c
poeditor/__init__.py
poeditor/__init__.py
""" API Client Interface for POEditor API (https://poeditor.com). Usage: >>> from poeditor import POEditorAPI >>> client = POEditorAPI(api_token='my_token') >>> projects = client.list_projects() """ __version__ = "1.1.0" try: from client import POEditorAPI, POEditorException, POEditorArgsException except ImportError: pass
""" API Client Interface for POEditor API (https://poeditor.com). Usage: >>> from poeditor import POEditorAPI >>> client = POEditorAPI(api_token='my_token') >>> projects = client.list_projects() """ __version__ = "1.1.0" try: from .client import POEditorAPI, POEditorException, POEditorArgsException except ImportError: pass
Fix package ImportError for Python3
Fix package ImportError for Python3 https://docs.python.org/3.0/whatsnew/3.0.html#removed-syntax "The only acceptable syntax for relative imports is from .[module] import name. All import forms not starting with . are interpreted as absolute imports. (PEP 0328)"
Python
mit
sporteasy/python-poeditor
--- +++ @@ -11,6 +11,6 @@ __version__ = "1.1.0" try: - from client import POEditorAPI, POEditorException, POEditorArgsException + from .client import POEditorAPI, POEditorException, POEditorArgsException except ImportError: pass
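A tiny illustration of the import behaviour the commit above fixes, assuming the package layout sketched in the comments: on Python 2 a bare `import client` is first resolved inside the package itself, so it happens to work, but Python 3 only performs absolute imports unless the relative form is spelled out.

# Hypothetical layout mirroring the package:
#   poeditor/__init__.py
#   poeditor/client.py          (defines POEditorAPI, ...)

# In poeditor/__init__.py:
try:
    from .client import POEditorAPI   # explicit relative: Python 2.6+ and 3.x
except ImportError:
    pass

# The old spelling relied on Python 2's implicit relative imports and
# raises ImportError on Python 3:
#   from client import POEditorAPI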
56d1416a486f48fcbcf425d535268dec19715f2e
blueplayer/__main__.py
blueplayer/__main__.py
import sys import serial import threading from blueplayer import blueplayer def main(): args = sys.argv[1:] # first argument should be a serial terminal to open if not len(args): port = "/dev/ttyAMA0" else: port = args[0] player = None with serial.Serial(port) as serial_port: try: player = blueplayer.BluePlayer(serial_port) player_thread = threading.Thread(target=player.start) serial_thread = threading.Thread(target=player.run) player_thread.start() serial_thread.start() player_thread.join() serial_thread.join() except KeyboardInterrupt as ex: print("\nBluePlayer cancelled by user") except Exception as ex: print("How embarrassing. The following error occurred {}".format(ex)) finally: if player: player.end() player.stop() if __name__ == "__main__": main()
import sys import serial import threading from blueplayer import blueplayer def main(): args = sys.argv[1:] # first argument should be a serial terminal to open if not len(args): port = "/dev/ttyS0" else: port = args[0] player = None with serial.Serial(port, 19200) as serial_port: try: player = blueplayer.BluePlayer(serial_port) player_thread = threading.Thread(target=player.start) serial_thread = threading.Thread(target=player.run) player_thread.start() serial_thread.start() player_thread.join() serial_thread.join() except KeyboardInterrupt as ex: print("\nBluePlayer cancelled by user") except Exception as ex: print("How embarrassing. The following error occurred {}".format(ex)) finally: if player: player.end() player.stop() if __name__ == "__main__": main()
Update serial port and baud rate
Update serial port and baud rate
Python
mit
dylwhich/rpi-ipod-emulator
--- +++ @@ -8,13 +8,13 @@ args = sys.argv[1:] # first argument should be a serial terminal to open if not len(args): - port = "/dev/ttyAMA0" + port = "/dev/ttyS0" else: port = args[0] player = None - with serial.Serial(port) as serial_port: + with serial.Serial(port, 19200) as serial_port: try: player = blueplayer.BluePlayer(serial_port)
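A small pyserial sketch of what the changed arguments mean: the second positional argument of serial.Serial is the baud rate, so the commit now opens /dev/ttyS0 at 19200 baud instead of /dev/ttyAMA0 at the 9600 default. The device path and the bytes written below are illustrative only, and the context-manager form assumes pyserial 3.x.

import serial

with serial.Serial('/dev/ttyS0', baudrate=19200, timeout=1.0) as port:
    port.write(b'\xff\x55')   # a couple of illustrative bytes
    reply = port.read(16)     # returns whatever arrived within the timeout
    print(len(reply), 'bytes received')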
851e515379a5da66b4171f5340b910a2db84d0f0
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ilya Akhmadullin # Copyright (c) 2013 Ilya Akhmadullin # # License: MIT # """This module exports the jscs plugin class.""" from SublimeLinter.lint import Linter class Jscs(Linter): """Provides an interface to jscs.""" syntax = ('javascript', 'html', 'html 5') cmd = 'jscs -r checkstyle' regex = ( r'^\s+?<error line="(?P<line>\d+)" ' r'column="(?P<col>\d+)" ' # jscs always reports with error severity; show as warning r'severity="(?P<warning>error)" ' r'message="(?P<message>.+?)"' ) multiline = True selectors = {'html': 'source.js.embedded.html'} tempfile_suffix = 'js'
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ilya Akhmadullin # Copyright (c) 2013 Ilya Akhmadullin # # License: MIT # """This module exports the jscs plugin class.""" from SublimeLinter.lint import Linter class Jscs(Linter): """Provides an interface to jscs.""" syntax = ('javascript', 'html', 'html 5') cmd = 'jscs -r checkstyle' config_file = ('-c', '.jscs.json') regex = ( r'^\s+?<error line="(?P<line>\d+)" ' r'column="(?P<col>\d+)" ' # jscs always reports with error severity; show as warning r'severity="(?P<warning>error)" ' r'message="(?P<message>.+?)"' ) multiline = True selectors = {'html': 'source.js.embedded.html'} tempfile_suffix = 'js'
Use the config_file attribute to find .jscs.json
Use the config_file attribute to find .jscs.json
Python
mit
roberthoog/SublimeLinter-jscs,SublimeLinter/SublimeLinter-jscs
--- +++ @@ -19,6 +19,7 @@ syntax = ('javascript', 'html', 'html 5') cmd = 'jscs -r checkstyle' + config_file = ('-c', '.jscs.json') regex = ( r'^\s+?<error line="(?P<line>\d+)" ' r'column="(?P<col>\d+)" '
0b853eb0d6a9d5f2ed6bb098c1fc1f9231cc52dc
djangobotcfg/status.py
djangobotcfg/status.py
from buildbot.status import html, words from buildbot.status.web.authz import Authz from .djangoauth import DjangoAuth authz = Authz( auth = DjangoAuth(), gracefulShutdown = 'auth', forceBuild = 'auth', forceAllBuilds = 'auth', pingBuilder = 'auth', stopBuild = 'auth', stopAllBuilds = 'auth', cancelPendingBuild = 'auth', stopChange = 'auth', cleanShutdown = 'auth', ) def get_status(secrets): return [ html.WebStatus( http_port = '8010', authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), words.IRC( host = 'irc.freenode.net', channels = ['#django-dev'], nick = 'djbuildbot', password = str(secrets['irc']['password']), notify_events = { 'successToFailure': True, 'failureToSuccess': True, } ), ]
from buildbot.status import html, words from buildbot.status.web.authz import Authz from .djangoauth import DjangoAuth authz = Authz( auth = DjangoAuth(), gracefulShutdown = 'auth', forceBuild = 'auth', forceAllBuilds = 'auth', pingBuilder = 'auth', stopBuild = 'auth', stopAllBuilds = 'auth', cancelPendingBuild = 'auth', stopChange = 'auth', cleanShutdown = 'auth', ) def get_status(secrets): return [ html.WebStatus( http_port = '8010', authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), words.IRC( host = 'irc.freenode.net', channels = ['#django-dev'], nick = 'djbuilds', password = str(secrets['irc']['password']), notify_events = { 'successToFailure': True, 'failureToSuccess': True, } ), ]
Use an IRC username that's not already registered.
Use an IRC username that's not already registered.
Python
bsd-3-clause
jacobian-archive/django-buildmaster,hochanh/django-buildmaster
--- +++ @@ -32,7 +32,7 @@ words.IRC( host = 'irc.freenode.net', channels = ['#django-dev'], - nick = 'djbuildbot', + nick = 'djbuilds', password = str(secrets['irc']['password']), notify_events = { 'successToFailure': True,
c3ada10657efa7435564a1d6f8ff7afbfb585f54
pombola/nigeria/tests.py
pombola/nigeria/tests.py
import unittest import doctest from . import views from django.test import TestCase from nose.plugins.attrib import attr # Needed to run the doc tests in views.py def suite(): suite = unittest.TestSuite() suite.addTest(doctest.DocTestSuite(views)) return suite @attr(country='nigeria') class HomeViewTest(TestCase): def test_homepage_context(self): response = self.client.get('/') self.assertIn('featured_person', response.context) self.assertIn('featured_persons', response.context) self.assertIn('editable_content', response.context)
import unittest import doctest from . import views from django.test import TestCase from nose.plugins.attrib import attr from pombola.info.models import InfoPage # Needed to run the doc tests in views.py def suite(): suite = unittest.TestSuite() suite.addTest(doctest.DocTestSuite(views)) return suite @attr(country='nigeria') class HomeViewTest(TestCase): def test_homepage_context(self): response = self.client.get('/') self.assertIn('featured_person', response.context) self.assertIn('featured_persons', response.context) self.assertIn('editable_content', response.context) @attr(country='nigeria') class InfoBlogListTest(TestCase): def setUp(self): self.info_page = InfoPage.objects.create( slug='escaping-test', kind='blog', title='Escaping Test', markdown_content="\nTesting\n\n**Escaped**\n\nContent" ) def tearDown(self): self.info_page.delete() def test_html_not_escaped(self): response = self.client.get('/blog/') self.assertNotIn('&lt;p&gt;', response.content)
Add a regression test for displaying escaped HTML in the blog
NG: Add a regression test for displaying escaped HTML in the blog A fixed version of the test that Chris Mytton suggested in: https://github.com/mysociety/pombola/pull/1587
Python
agpl-3.0
patricmutwiri/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,ken-muturi/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,hzj123/56th
--- +++ @@ -5,6 +5,8 @@ from django.test import TestCase from nose.plugins.attrib import attr + +from pombola.info.models import InfoPage # Needed to run the doc tests in views.py @@ -21,3 +23,20 @@ self.assertIn('featured_person', response.context) self.assertIn('featured_persons', response.context) self.assertIn('editable_content', response.context) + +@attr(country='nigeria') +class InfoBlogListTest(TestCase): + + def setUp(self): + self.info_page = InfoPage.objects.create( + slug='escaping-test', + kind='blog', + title='Escaping Test', markdown_content="\nTesting\n\n**Escaped**\n\nContent" + ) + + def tearDown(self): + self.info_page.delete() + + def test_html_not_escaped(self): + response = self.client.get('/blog/') + self.assertNotIn('&lt;p&gt;', response.content)
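For context on what the new test asserts: Django escapes markup into entities such as &lt;p&gt;, so seeing that literal string in the blog list response means the already-rendered markdown was escaped a second time. A quick standalone check of the escaping behaviour, assuming only that Django is installed:

from django.utils.html import conditional_escape, escape
from django.utils.safestring import mark_safe

rendered = '<p>Testing</p>'   # what the markdown renderer produces

print(escape(rendered))                         # &lt;p&gt;Testing&lt;/p&gt;
print(conditional_escape(mark_safe(rendered)))  # <p>Testing</p>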
6ef289403b4d88bc5e1a70568133924de54c2b9f
pyang/plugins/bbf.py
pyang/plugins/bbf.py
"""BBF usage guidelines plugin See BBF Assigned Names and Numbers at https://wiki.broadband-forum.org/display/BBF/Assigned+Names+and+Numbers#AssignedNamesandNumbers-URNNamespaces """ import optparse from pyang import plugin from pyang.plugins import lint def pyang_plugin_init(): plugin.register_plugin(BBFPlugin()) class BBFPlugin(lint.LintPlugin): def __init__(self): lint.LintPlugin.__init__(self) self.namespace_prefixes = ['urn:bbf:yang:'] self.modulename_prefixes = ['bbf'] def add_opts(self, optparser): optlist = [ optparse.make_option("--bbf", dest="bbf", action="store_true", help="Validate the module(s) according to " \ "BBF rules."), ] optparser.add_options(optlist) def setup_ctx(self, ctx): if not ctx.opts.bbf: return self._setup_ctx(ctx)
"""BBF usage guidelines plugin See BBF Assigned Names and Numbers at https://wiki.broadband-forum.org/display/BBF/Assigned+Names+and+Numbers#AssignedNamesandNumbers-URNNamespaces """ import optparse from pyang import plugin from pyang.plugins import lint def pyang_plugin_init(): plugin.register_plugin(BBFPlugin()) class BBFPlugin(lint.LintPlugin): def __init__(self): lint.LintPlugin.__init__(self) self.namespace_prefixes = ['urn:bbf:yang:'] self.modulename_prefixes = ['bbf'] self.ensure_hyphenated_names = True def add_opts(self, optparser): optlist = [ optparse.make_option("--bbf", dest="bbf", action="store_true", help="Validate the module(s) according to " \ "BBF rules."), ] optparser.add_options(optlist) def setup_ctx(self, ctx): if not ctx.opts.bbf: return self._setup_ctx(ctx) if ctx.max_line_len is None: ctx.max_line_len = 70
Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70
Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70 This is to match the settings that BBF uses when validating its modules. The max_line_len setting won't override an explicit --max-line-len from the command line.
Python
isc
mbj4668/pyang,mbj4668/pyang
--- +++ @@ -15,6 +15,7 @@ lint.LintPlugin.__init__(self) self.namespace_prefixes = ['urn:bbf:yang:'] self.modulename_prefixes = ['bbf'] + self.ensure_hyphenated_names = True def add_opts(self, optparser): optlist = [ @@ -30,3 +31,5 @@ if not ctx.opts.bbf: return self._setup_ctx(ctx) + if ctx.max_line_len is None: + ctx.max_line_len = 70
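A tiny illustration of the convention used above for max_line_len: None means "not given on the command line", so the plugin default of 70 is applied only in that case and an explicit flag always wins. The option handling below is made up to keep the sketch standalone.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--max-line-len', type=int, default=None)

for argv in ([], ['--max-line-len', '132']):
    args = parser.parse_args(argv)
    if args.max_line_len is None:   # nothing explicit on the command line
        args.max_line_len = 70      # plugin default
    print(argv, '->', args.max_line_len)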
dc6af73616163463bf2c5feac97ac1473ea76e07
proj/proj/models/user.py
proj/proj/models/user.py
# coding: utf-8 import datetime from ._base import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(50), unique=True) email = db.Column(db.String(50)) avatar = db.Column(db.String(200)) created_at = db.Column(db.DateTime, default=datetime.datetime.now) def __repr__(self): return '<User %s>' % self.name
# coding: utf-8 import datetime from ._base import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(50), unique=True) email = db.Column(db.String(50)) avatar = db.Column(db.String(200)) password = db.Column(db.String(200)) created_at = db.Column(db.DateTime, default=datetime.datetime.now) def __repr__(self): return '<User %s>' % self.name
Add password to User model.
Add password to User model.
Python
mit
1045347128/Flask-Boost,hustlzp/Flask-Boost,hustlzp/Flask-Boost,1045347128/Flask-Boost,1045347128/Flask-Boost,1045347128/Flask-Boost,hustlzp/Flask-Boost,hustlzp/Flask-Boost
--- +++ @@ -8,6 +8,7 @@ name = db.Column(db.String(50), unique=True) email = db.Column(db.String(50)) avatar = db.Column(db.String(200)) + password = db.Column(db.String(200)) created_at = db.Column(db.DateTime, default=datetime.datetime.now) def __repr__(self):
1a0339b85d852526c184eeace73021fc7d68b2c6
python_dispatcher.py
python_dispatcher.py
import traceback from routes import Mapper import ppp_core import example_ppp_module as flower import ppp_questionparsing_grammatical as qp_grammatical import ppp_cas #import ppp_nlp_ml_standalone class Application: def __init__(self): self.mapper = Mapper() self.mapper.connect('core', '/core/', app=ppp_core.app) self.mapper.connect('qp_grammatical', '/qp_grammatical/', app=qp_grammatical.app) self.mapper.connect('flower', '/flower/', app=flower.app) self.mapper.connect('cas', '/cas/', app=ppp_cas.app) self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_cas.app) #self.mapper.connect('nlp_ml_standalone', '/nlp_ml_standalone/', app=ppp_nlp_ml_standalone.app) def __call__(self, environ, start_response): match = self.mapper.routematch(environ=environ) app = match[0]['app'] if match else self.not_found try: return app(environ, start_response) except KeyboardInterrupt: raise except Exception as e: traceback.print_exc(e) def not_found(self, environ, start_response): headers = [('Content-Type', 'text/plain')] start_response('404 Not Found', headers) return [b'Not found.'] app = Application()
import traceback from routes import Mapper import ppp_core import example_ppp_module as flower import ppp_questionparsing_grammatical as qp_grammatical import ppp_cas import ppp_spell_checker #import ppp_nlp_ml_standalone class Application: def __init__(self): self.mapper = Mapper() self.mapper.connect('core', '/core/', app=ppp_core.app) self.mapper.connect('qp_grammatical', '/qp_grammatical/', app=qp_grammatical.app) self.mapper.connect('flower', '/flower/', app=flower.app) self.mapper.connect('cas', '/cas/', app=ppp_cas.app) self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_spell_checker.app) #self.mapper.connect('nlp_ml_standalone', '/nlp_ml_standalone/', app=ppp_nlp_ml_standalone.app) def __call__(self, environ, start_response): match = self.mapper.routematch(environ=environ) app = match[0]['app'] if match else self.not_found try: return app(environ, start_response) except KeyboardInterrupt: raise except Exception as e: traceback.print_exc(e) def not_found(self, environ, start_response): headers = [('Content-Type', 'text/plain')] start_response('404 Not Found', headers) return [b'Not found.'] app = Application()
Fix name of spell checker.
Fix name of spell checker.
Python
cc0-1.0
ProjetPP/Deployment,ProjetPP/Deployment,ProjetPP/Deployment
--- +++ @@ -5,6 +5,7 @@ import example_ppp_module as flower import ppp_questionparsing_grammatical as qp_grammatical import ppp_cas +import ppp_spell_checker #import ppp_nlp_ml_standalone class Application: @@ -14,7 +15,7 @@ self.mapper.connect('qp_grammatical', '/qp_grammatical/', app=qp_grammatical.app) self.mapper.connect('flower', '/flower/', app=flower.app) self.mapper.connect('cas', '/cas/', app=ppp_cas.app) - self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_cas.app) + self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_spell_checker.app) #self.mapper.connect('nlp_ml_standalone', '/nlp_ml_standalone/', app=ppp_nlp_ml_standalone.app) def __call__(self, environ, start_response):
89b54d9c7fec213465446148e39612a2ac659ca2
test/common/test_openstack.py
test/common/test_openstack.py
import sys import unittest from mock import Mock from libcloud.common.openstack import OpenStackBaseConnection class OpenStackBaseConnectionTest(unittest.TestCase): def setUp(self): self.timeout = 10 OpenStackBaseConnection.conn_classes = (None, Mock()) self.connection = OpenStackBaseConnection('foo', 'bar', timeout=self.timeout, ex_force_auth_url='https://127.0.0.1') self.connection.driver = Mock() self.connection.driver.name = 'OpenStackDriver' def test_base_connection_timeout(self): self.connection.connect() self.assertEquals(self.connection.timeout, self.timeout) self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', port=443, timeout=10) if __name__ == '__main__': sys.exit(unittest.main())
import sys import unittest from mock import Mock from libcloud.common.openstack import OpenStackBaseConnection from libcloud.utils.py3 import PY25 class OpenStackBaseConnectionTest(unittest.TestCase): def setUp(self): self.timeout = 10 OpenStackBaseConnection.conn_classes = (None, Mock()) self.connection = OpenStackBaseConnection('foo', 'bar', timeout=self.timeout, ex_force_auth_url='https://127.0.0.1') self.connection.driver = Mock() self.connection.driver.name = 'OpenStackDriver' def test_base_connection_timeout(self): self.connection.connect() self.assertEquals(self.connection.timeout, self.timeout) if PY25: self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', port=443) else: self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', port=443, timeout=10) if __name__ == '__main__': sys.exit(unittest.main())
Fix test so it works with python 2.5.
Fix test so it works with python 2.5. git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1342997 13f79535-47bb-0310-9956-ffa450edef68
Python
apache-2.0
Jc2k/libcloud,marcinzaremba/libcloud,Scalr/libcloud,mathspace/libcloud,Verizon/libcloud,DimensionDataCBUSydney/libcloud,Cloud-Elasticity-Services/as-libcloud,lochiiconnectivity/libcloud,MrBasset/libcloud,sfriesel/libcloud,wrigri/libcloud,Itxaka/libcloud,erjohnso/libcloud,jerryblakley/libcloud,Scalr/libcloud,marcinzaremba/libcloud,JamesGuthrie/libcloud,watermelo/libcloud,iPlantCollaborativeOpenSource/libcloud,StackPointCloud/libcloud,niteoweb/libcloud,atsaki/libcloud,pquentin/libcloud,mistio/libcloud,wrigri/libcloud,cryptickp/libcloud,illfelder/libcloud,carletes/libcloud,andrewsomething/libcloud,kater169/libcloud,Itxaka/libcloud,aviweit/libcloud,apache/libcloud,illfelder/libcloud,SecurityCompass/libcloud,niteoweb/libcloud,SecurityCompass/libcloud,wrigri/libcloud,mtekel/libcloud,schaubl/libcloud,mistio/libcloud,wuyuewen/libcloud,ZuluPro/libcloud,vongazman/libcloud,munkiat/libcloud,cloudControl/libcloud,mathspace/libcloud,DimensionDataCBUSydney/libcloud,sgammon/libcloud,ninefold/libcloud,lochiiconnectivity/libcloud,ByteInternet/libcloud,jimbobhickville/libcloud,mtekel/libcloud,sahildua2305/libcloud,sahildua2305/libcloud,sahildua2305/libcloud,aleGpereira/libcloud,StackPointCloud/libcloud,kater169/libcloud,ClusterHQ/libcloud,niteoweb/libcloud,andrewsomething/libcloud,Scalr/libcloud,ByteInternet/libcloud,DimensionDataCBUSydney/libcloud,dcorbacho/libcloud,carletes/libcloud,carletes/libcloud,Verizon/libcloud,jimbobhickville/libcloud,wido/libcloud,iPlantCollaborativeOpenSource/libcloud,wuyuewen/libcloud,schaubl/libcloud,Kami/libcloud,wuyuewen/libcloud,munkiat/libcloud,sergiorua/libcloud,munkiat/libcloud,erjohnso/libcloud,pantheon-systems/libcloud,mathspace/libcloud,aviweit/libcloud,sergiorua/libcloud,thesquelched/libcloud,aviweit/libcloud,SecurityCompass/libcloud,mgogoulos/libcloud,thesquelched/libcloud,kater169/libcloud,curoverse/libcloud,NexusIS/libcloud,jerryblakley/libcloud,Kami/libcloud,marcinzaremba/libcloud,erjohnso/libcloud,cryptickp/libcloud,samuelchong/libcloud,aleGpereira/libcloud,curoverse/libcloud,sfriesel/libcloud,briancurtin/libcloud,samuelchong/libcloud,schaubl/libcloud,supertom/libcloud,vongazman/libcloud,sgammon/libcloud,MrBasset/libcloud,NexusIS/libcloud,Cloud-Elasticity-Services/as-libcloud,ZuluPro/libcloud,curoverse/libcloud,aleGpereira/libcloud,watermelo/libcloud,cloudControl/libcloud,pquentin/libcloud,dcorbacho/libcloud,apache/libcloud,JamesGuthrie/libcloud,briancurtin/libcloud,ZuluPro/libcloud,cloudControl/libcloud,t-tran/libcloud,smaffulli/libcloud,pantheon-systems/libcloud,watermelo/libcloud,briancurtin/libcloud,iPlantCollaborativeOpenSource/libcloud,Cloud-Elasticity-Services/as-libcloud,jimbobhickville/libcloud,StackPointCloud/libcloud,mistio/libcloud,vongazman/libcloud,andrewsomething/libcloud,atsaki/libcloud,supertom/libcloud,Jc2k/libcloud,techhat/libcloud,thesquelched/libcloud,pantheon-systems/libcloud,Verizon/libcloud,mbrukman/libcloud,techhat/libcloud,dcorbacho/libcloud,ByteInternet/libcloud,smaffulli/libcloud,t-tran/libcloud,wido/libcloud,atsaki/libcloud,mtekel/libcloud,Kami/libcloud,techhat/libcloud,smaffulli/libcloud,sfriesel/libcloud,t-tran/libcloud,ClusterHQ/libcloud,mbrukman/libcloud,mgogoulos/libcloud,apache/libcloud,pquentin/libcloud,NexusIS/libcloud,sergiorua/libcloud,mbrukman/libcloud,jerryblakley/libcloud,supertom/libcloud,mgogoulos/libcloud,MrBasset/libcloud,JamesGuthrie/libcloud,ninefold/libcloud,samuelchong/libcloud,lochiiconnectivity/libcloud,cryptickp/libcloud,illfelder/libcloud,Itxaka/libcloud,wido/libcloud
--- +++ @@ -4,6 +4,7 @@ from mock import Mock from libcloud.common.openstack import OpenStackBaseConnection +from libcloud.utils.py3 import PY25 class OpenStackBaseConnectionTest(unittest.TestCase): @@ -20,9 +21,13 @@ def test_base_connection_timeout(self): self.connection.connect() self.assertEquals(self.connection.timeout, self.timeout) - self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', - port=443, - timeout=10) + if PY25: + self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', + port=443) + else: + self.connection.conn_classes[1].assert_called_with(host='127.0.0.1', + port=443, + timeout=10) if __name__ == '__main__':
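A standalone illustration of the mock pattern the fixed test relies on: the connection class is replaced by a Mock, so the test can assert exactly which keyword arguments it was constructed with, and the Python 2.5 branch simply expects a call without the timeout keyword. The class name and arguments below are made up.

from unittest import mock

connection_class = mock.Mock(name='HTTPSConnection')

# What the code under test would do internally:
connection_class(host='127.0.0.1', port=443, timeout=10)

# Matches the recorded call, so it passes silently:
connection_class.assert_called_with(host='127.0.0.1', port=443, timeout=10)

# Omitting a keyword that was actually passed raises AssertionError:
try:
    connection_class.assert_called_with(host='127.0.0.1', port=443)
except AssertionError as exc:
    print('mismatch:', exc)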
f8d90e92ce791650dc89944fca009fc36d9e3a90
crawler/wikitravel-optimize-articles.py
crawler/wikitravel-optimize-articles.py
#!/opt/local/bin/python import json import os import re import string import sys myPath = os.path.dirname(os.path.realpath(__file__)) for i, line in enumerate(sys.stdin): (url, title, fileBase) = json.loads(line) fileName = fileBase + '.article' outFileName = fileName + '.opt' if os.path.exists(outFileName): sys.stderr.write('Skipping existing {0} {1}\n'.format(i, fileName)) else: sys.stderr.write('Optimizing {0} {1}\n'.format(i, fileName)) assert 0 == os.system('java -jar {myPath}/htmlcompressor.jar ' '--remove-intertag-spaces --simple-bool-attr --remove-quotes ' '--remove-js-protocol --type html ' '-o {outFileName} {fileName}' .format(myPath = myPath, fileName = fileName, outFileName = outFileName))
#!/opt/local/bin/python import json import os import re import string import sys myPath = os.path.dirname(os.path.realpath(__file__)) def formatPath(s): return s.replace('(', '\\(').replace(')', '\\)') for i, line in enumerate(sys.stdin): (url, title, fileBase) = json.loads(line) fileName = fileBase + '.article' outFileName = fileName + '.opt' if os.path.exists(outFileName): sys.stderr.write('Skipping existing {0} {1}\n'.format(i, fileName)) else: sys.stderr.write('Optimizing {0} {1}\n'.format(i, fileName)) assert 0 == os.system('java -jar {myPath}/htmlcompressor.jar ' '--remove-intertag-spaces --simple-bool-attr --remove-quotes ' '--remove-js-protocol --type html ' '-o {outFileName} {fileName}' .format(myPath = myPath, fileName = formatPath(fileName), outFileName = formatPath(outFileName)))
Correctly handle paths containing the symbols '(' and ')'.
[crawler] Correctly handle paths containing the symbols '(' and ')'.
Python
apache-2.0
VladiMihaylenko/omim,edl00k/omim,dobriy-eeh/omim,65apps/omim,65apps/omim,Zverik/omim,vasilenkomike/omim,UdjinM6/omim,syershov/omim,milchakov/omim,yunikkk/omim,ygorshenin/omim,alexzatsepin/omim,mapsme/omim,kw217/omim,dobriy-eeh/omim,AlexanderMatveenko/omim,vasilenkomike/omim,mpimenov/omim,milchakov/omim,ygorshenin/omim,mgsergio/omim,mgsergio/omim,igrechuhin/omim,guard163/omim,65apps/omim,alexzatsepin/omim,darina/omim,guard163/omim,kw217/omim,Komzpa/omim,mapsme/omim,igrechuhin/omim,sidorov-panda/omim,andrewshadura/omim,guard163/omim,augmify/omim,therearesomewhocallmetim/omim,alexzatsepin/omim,gardster/omim,UdjinM6/omim,mgsergio/omim,dkorolev/omim,felipebetancur/omim,vasilenkomike/omim,sidorov-panda/omim,sidorov-panda/omim,matsprea/omim,darina/omim,victorbriz/omim,felipebetancur/omim,rokuz/omim,mapsme/omim,augmify/omim,65apps/omim,TimurTarasenko/omim,Endika/omim,syershov/omim,lydonchandra/omim,rokuz/omim,Transtech/omim,UdjinM6/omim,darina/omim,darina/omim,bykoianko/omim,mpimenov/omim,simon247/omim,mgsergio/omim,ygorshenin/omim,matsprea/omim,vasilenkomike/omim,gardster/omim,vng/omim,rokuz/omim,rokuz/omim,vng/omim,UdjinM6/omim,vladon/omim,programming086/omim,andrewshadura/omim,edl00k/omim,Saicheg/omim,trashkalmar/omim,TimurTarasenko/omim,mpimenov/omim,rokuz/omim,mpimenov/omim,sidorov-panda/omim,trashkalmar/omim,rokuz/omim,UdjinM6/omim,VladiMihaylenko/omim,VladiMihaylenko/omim,Zverik/omim,syershov/omim,guard163/omim,stangls/omim,mgsergio/omim,65apps/omim,therearesomewhocallmetim/omim,jam891/omim,lydonchandra/omim,ygorshenin/omim,guard163/omim,krasin/omim,syershov/omim,AlexanderMatveenko/omim,simon247/omim,victorbriz/omim,goblinr/omim,dobriy-eeh/omim,dobriy-eeh/omim,jam891/omim,vladon/omim,albertshift/omim,felipebetancur/omim,programming086/omim,Komzpa/omim,goblinr/omim,Zverik/omim,Komzpa/omim,goblinr/omim,Saicheg/omim,simon247/omim,stangls/omim,AlexanderMatveenko/omim,guard163/omim,alexzatsepin/omim,vasilenkomike/omim,programming086/omim,wersoo/omim,syershov/omim,edl00k/omim,albertshift/omim,milchakov/omim,edl00k/omim,edl00k/omim,UdjinM6/omim,ygorshenin/omim,krasin/omim,therearesomewhocallmetim/omim,Komzpa/omim,jam891/omim,vladon/omim,mpimenov/omim,VladiMihaylenko/omim,dkorolev/omim,vladon/omim,goblinr/omim,victorbriz/omim,goblinr/omim,bykoianko/omim,bykoianko/omim,edl00k/omim,edl00k/omim,UdjinM6/omim,ygorshenin/omim,vasilenkomike/omim,mgsergio/omim,stangls/omim,augmify/omim,victorbriz/omim,yunikkk/omim,Endika/omim,jam891/omim,Saicheg/omim,milchakov/omim,stangls/omim,UdjinM6/omim,sidorov-panda/omim,Zverik/omim,yunikkk/omim,VladiMihaylenko/omim,stangls/omim,jam891/omim,gardster/omim,dobriy-eeh/omim,felipebetancur/omim,darina/omim,VladiMihaylenko/omim,mpimenov/omim,victorbriz/omim,dkorolev/omim,darina/omim,igrechuhin/omim,albertshift/omim,vasilenkomike/omim,Transtech/omim,albertshift/omim,rokuz/omim,Endika/omim,krasin/omim,sidorov-panda/omim,VladiMihaylenko/omim,yunikkk/omim,albertshift/omim,goblinr/omim,stangls/omim,Transtech/omim,mgsergio/omim,VladiMihaylenko/omim,lydonchandra/omim,Zverik/omim,Saicheg/omim,yunikkk/omim,dobriy-eeh/omim,matsprea/omim,simon247/omim,syershov/omim,bykoianko/omim,trashkalmar/omim,augmify/omim,trashkalmar/omim,trashkalmar/omim,victorbriz/omim,Zverik/omim,mpimenov/omim,alexzatsepin/omim,programming086/omim,kw217/omim,darina/omim,yunikkk/omim,trashkalmar/omim,dobriy-eeh/omim,bykoianko/omim,alexzatsepin/omim,bykoianko/omim,felipebetancur/omim,UdjinM6/omim,mapsme/omim,programming086/omim,lydonchandra/omim,andrewshadura/omim,mapsme/omim,syershov/omim,andrewshadura/omim,stangls/omim,albertshift/omim,mapsme/omim,kw217/omim,goblinr/omim,milchakov/omim,mapsme/omim,goblinr/omim,victorbriz/omim,milchakov/omim,dobriy-eeh/omim,sidorov-panda/omim,augmify/omim,dkorolev/omim,TimurTarasenko/omim,bykoianko/omim,vasilenkomike/omim,wersoo/omim,wersoo/omim,TimurTarasenko/omim,therearesomewhocallmetim/omim,yunikkk/omim,yunikkk/omim,mgsergio/omim,dobriy-eeh/omim,Komzpa/omim,mapsme/omim,kw217/omim,gardster/omim,65apps/omim,guard163/omim,milchakov/omim,bykoianko/omim,andrewshadura/omim,felipebetancur/omim,Endika/omim,krasin/omim,goblinr/omim,wersoo/omim,AlexanderMatveenko/omim,vng/omim,matsprea/omim,jam891/omim,AlexanderMatveenko/omim,albertshift/omim,vladon/omim,UdjinM6/omim,Transtech/omim,mgsergio/omim,albertshift/omim,milchakov/omim,rokuz/omim,AlexanderMatveenko/omim,ygorshenin/omim,igrechuhin/omim,vladon/omim,guard163/omim,alexzatsepin/omim,wersoo/omim,sidorov-panda/omim,vng/omim,VladiMihaylenko/omim,Transtech/omim,dkorolev/omim,igrechuhin/omim,Zverik/omim,syershov/omim,programming086/omim,dobriy-eeh/omim,yunikkk/omim,AlexanderMatveenko/omim,Transtech/omim,mpimenov/omim,kw217/omim,Transtech/omim,mgsergio/omim,albertshift/omim,Endika/omim,trashkalmar/omim,Saicheg/omim,dobriy-eeh/omim,darina/omim,Volcanoscar/omim,augmify/omim,igrechuhin/omim,augmify/omim,Zverik/omim,TimurTarasenko/omim,milchakov/omim,lydonchandra/omim,AlexanderMatveenko/omim,darina/omim,edl00k/omim,vladon/omim,sidorov-panda/omim,AlexanderMatveenko/omim,rokuz/omim,matsprea/omim,igrechuhin/omim,Endika/omim,65apps/omim,Saicheg/omim,Volcanoscar/omim,goblinr/omim,dkorolev/omim,stangls/omim,65apps/omim,wersoo/omim,Zverik/omim,Volcanoscar/omim,gardster/omim,Zverik/omim,therearesomewhocallmetim/omim,mpimenov/omim,simon247/omim,dkorolev/omim,AlexanderMatveenko/omim,alexzatsepin/omim,VladiMihaylenko/omim,gardster/omim,syershov/omim,simon247/omim,igrechuhin/omim,matsprea/omim,krasin/omim,jam891/omim,gardster/omim,krasin/omim,programming086/omim,alexzatsepin/omim,andrewshadura/omim,mgsergio/omim,sidorov-panda/omim,therearesomewhocallmetim/omim,kw217/omim,mpimenov/omim,gardster/omim,trashkalmar/omim,dobriy-eeh/omim,jam891/omim,vladon/omim,milchakov/omim,wersoo/omim,Volcanoscar/omim,ygorshenin/omim,stangls/omim,Komzpa/omim,lydonchandra/omim,Volcanoscar/omim,wersoo/omim,syershov/omim,guard163/omim,therearesomewhocallmetim/omim,lydonchandra/omim,lydonchandra/omim,guard163/omim,syershov/omim,krasin/omim,victorbriz/omim,alexzatsepin/omim,lydonchandra/omim,stangls/omim,andrewshadura/omim,mapsme/omim,Zverik/omim,mapsme/omim,Transtech/omim,dkorolev/omim,therearesomewhocallmetim/omim,dkorolev/omim,darina/omim,Endika/omim,mpimenov/omim,VladiMihaylenko/omim,felipebetancur/omim,65apps/omim,darina/omim,ygorshenin/omim,TimurTarasenko/omim,vng/omim,programming086/omim,simon247/omim,65apps/omim,Transtech/omim,edl00k/omim,TimurTarasenko/omim,Saicheg/omim,TimurTarasenko/omim,alexzatsepin/omim,vng/omim,gardster/omim,mapsme/omim,kw217/omim,VladiMihaylenko/omim,wersoo/omim,rokuz/omim,bykoianko/omim,vladon/omim,lydonchandra/omim,matsprea/omim,Volcanoscar/omim,rokuz/omim,bykoianko/omim,Saicheg/omim,mapsme/omim,Transtech/omim,jam891/omim,augmify/omim,Endika/omim,UdjinM6/omim,Transtech/omim,programming086/omim,Saicheg/omim,milchakov/omim,Saicheg/omim,wersoo/omim,bykoianko/omim,Komzpa/omim,goblinr/omim,edl00k/omim,gardster/omim,darina/omim,ygorshenin/omim,vng/omim,Zverik/omim,kw217/omim,TimurTarasenko/omim,victorbriz/omim,krasin/omim,Volcanoscar/omim,alexzatsepin/omim,trashkalmar/omim,edl00k/omim,Komzpa/omim,ygorshenin/omim,simon247/omim,alexzatsepin/omim,Komzpa/omim,Endika/omim,Endika/omim,igrechuhin/omim,milchakov/omim,krasin/omim,TimurTarasenko/omim,vng/omim,mpimenov/omim,vng/omim,mapsme/omim,therearesomewhocallmetim/omim,igrechuhin/omim,vladon/omim,bykoianko/omim,programming086/omim,vng/omim,felipebetancur/omim,kw217/omim,yunikkk/omim,milchakov/omim,Volcanoscar/omim,Volcanoscar/omim,krasin/omim,VladiMihaylenko/omim,augmify/omim,Volcanoscar/omim,dkorolev/omim,vasilenkomike/omim,trashkalmar/omim,albertshift/omim,bykoianko/omim,matsprea/omim,simon247/omim,therearesomewhocallmetim/omim,trashkalmar/omim,Zverik/omim,jam891/omim,goblinr/omim,yunikkk/omim
--- +++ @@ -6,6 +6,9 @@ import sys myPath = os.path.dirname(os.path.realpath(__file__)) + +def formatPath(s): + return s.replace('(', '\\(').replace(')', '\\)') for i, line in enumerate(sys.stdin): (url, title, fileBase) = json.loads(line) @@ -19,4 +22,6 @@ '--remove-intertag-spaces --simple-bool-attr --remove-quotes ' '--remove-js-protocol --type html ' '-o {outFileName} {fileName}' - .format(myPath = myPath, fileName = fileName, outFileName = outFileName)) + .format(myPath = myPath, + fileName = formatPath(fileName), + outFileName = formatPath(outFileName)))
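The commit above escapes '(' and ')' by hand before building an os.system command line. As a side note, and not what the crawler itself does: quoting the whole argument, with pipes.quote on Python 2 or shlex.quote on Python 3, covers parentheses, spaces and quotes in one go; the path below is illustrative.

import shlex  # Python 3.3+; pipes.quote is the Python 2 equivalent

file_name = 'Aix-en-Provence (France).article'

command = 'java -jar htmlcompressor.jar -o {} {}'.format(
    shlex.quote(file_name + '.opt'), shlex.quote(file_name))
print(command)

# Passing an argument list to subprocess avoids shell quoting entirely:
#   subprocess.call(['java', '-jar', 'htmlcompressor.jar', '-o',
#                    file_name + '.opt', file_name])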
5389fb8575251e2bd8ed18d96f4aa615e9a37bfa
deploy.py
deploy.py
#!/usr/bin/env python import argparse import os import requests my_domain = "www.proporti.onl" username = "emptysquare" parser = argparse.ArgumentParser() parser.add_argument( "token", metavar="PYTHON_ANYWHERE_TOKEN", help="A Python Anywhere API token for your account", ) args = parser.parse_args() print("Rsync files....") os.system( "rsync -rv --exclude '*.pyc' *" " emptysquare@ssh.pythonanywhere.com:www.proporti.onl/" ) print("Reinstall dependencies....") os.system( "ssh emptysquare@ssh.pythonanywhere.com" " '~/my-venv3/bin/pip install -U -r ~/www.proporti.onl/requirements.txt'" ) print("Restarting....") uri = "https://www.pythonanywhere.com/api/v0/user/{uname}/webapps/{dom}/reload/" response = requests.post( uri.format(uname=username, dom=my_domain), headers={"Authorization": "Token {token}".format(token=args.token)}, ) if response.status_code == 200: print("All OK") else: print( "Got unexpected status code {}: {!r}".format( response.status_code, response.content ) )
#!/usr/bin/env python import argparse import os import requests my_domain = "www.proporti.onl" username = "emptysquare" parser = argparse.ArgumentParser() parser.add_argument( "token", metavar="PYTHON_ANYWHERE_TOKEN", help="A Python Anywhere API token for your account", ) args = parser.parse_args() print("Rsync files....") os.system( "rsync -rv --exclude '*.pyc' *" " emptysquare@ssh.pythonanywhere.com:www.proporti.onl/" ) print("Reinstall dependencies....") os.system( "ssh emptysquare@ssh.pythonanywhere.com" " '~/proporti.onl.venv/bin/pip install -U -r ~/www.proporti.onl/requirements.txt'" ) print("Restarting....") uri = "https://www.pythonanywhere.com/api/v0/user/{uname}/webapps/{dom}/reload/" response = requests.post( uri.format(uname=username, dom=my_domain), headers={"Authorization": "Token {token}".format(token=args.token)}, ) if response.status_code == 200: print("All OK") else: print( "Got unexpected status code {}: {!r}".format( response.status_code, response.content ) )
Fix virtualenv path on PythonAnywhere
Fix virtualenv path on PythonAnywhere
Python
apache-2.0
ajdavis/twitter-gender-ratio,ajdavis/twitter-gender-distribution,ajdavis/twitter-gender-distribution,ajdavis/twitter-gender-ratio
--- +++ @@ -25,7 +25,7 @@ print("Reinstall dependencies....") os.system( "ssh emptysquare@ssh.pythonanywhere.com" - " '~/my-venv3/bin/pip install -U -r ~/www.proporti.onl/requirements.txt'" + " '~/proporti.onl.venv/bin/pip install -U -r ~/www.proporti.onl/requirements.txt'" ) print("Restarting....")
81ca235178a742e0041f2483d1f80d367d77264d
markov.py
markov.py
import random class Markov: def __init__(self, source, k=5): self.source = source self.k = k self._init_source() def _init_source(self): self.seeds = {} for i in range(len(self.source) - self.k - 1): seed = tuple(self.source[i:i+self.k]) if seed not in self.seeds: self.seeds[seed] = [] self.seeds[seed].append(self.source[i+self.k]) print('Markov dict initialized with {} keys'.format(len(self.seeds.keys()))) def chain(self, length=50, seed=None): if not seed or seed not in self.seeds: seed = random.choice(list(self.seeds.keys())) output = [] while len(output) < length: if seed not in self.seeds: seed = random.choice(list(self.seeds.keys())) next = random.choice(self.seeds[seed]) output.append(next) seed = tuple(list(seed[1:]) + [next]) return ' '.join(output) def find_seed(self, start_word): seeds = list(self.seeds.keys()) seeds = list(filter(lambda s: start_word in s, seeds)) return random.choice(seeds)
import random class Markov: def __init__(self, source, k=5): self.source = source self.k = k self._init_source() def _init_source(self): self.seeds = {} for i in range(len(self.source) - self.k - 1): seed = tuple(self.source[i:i+self.k]) if seed not in self.seeds: self.seeds[seed] = [] self.seeds[seed].append(self.source[i+self.k]) print('Markov dict initialized with {} keys'.format(len(self.seeds.keys()))) def chain(self, length=50, seed=None): if not seed or seed not in self.seeds: seed = random.choice(list(self.seeds.keys())) output = [] while len(output) < length: if seed not in self.seeds: seed = random.choice(list(self.seeds.keys())) next = random.choice(self.seeds[seed]) output.append(next) seed = tuple(list(seed[1:]) + [next]) return ' '.join(output) def find_seed(self, start_word): seeds = list(self.seeds.keys()) seeds = list(filter(lambda s: start_word in s, seeds)) if len(seeds) == 0: return None return random.choice(seeds)
Fix find_seed behavior when the word is not present
Fix find_seed behavior when the word is not present
Python
mit
calzoneman/MarkovBot,calzoneman/MarkovBot
--- +++ @@ -30,4 +30,6 @@ def find_seed(self, start_word): seeds = list(self.seeds.keys()) seeds = list(filter(lambda s: start_word in s, seeds)) + if len(seeds) == 0: + return None return random.choice(seeds)
09a395526dadac36f295674e01121818278ac91f
kobold/hash_functions.py
kobold/hash_functions.py
def merge(default, to_mutate):
    for key, value in default.items():
        to_mutate.setdefault(key, value)
    return to_mutate

def combine(default, extra):
    new = {}
    for key, value in default.items():
        new[key] = value
    for key, value in extra.items():
        new[key] = value
    return new
def project(hash_in, attributes):
    return {key: value for (key, value) in hash_in.iteritems() if key in attributes}

def merge(default, to_mutate):
    for key, value in default.items():
        to_mutate.setdefault(key, value)
    return to_mutate

def combine(default, extra):
    new = {}
    for key, value in default.items():
        new[key] = value
    for key, value in extra.items():
        new[key] = value
    return new
Add a "project" hash function, for projecting certain keys out of a dict
Add a "project" hash function, for projecting certain keys out of a dict
Python
mit
krieghan/kobold_python,krieghan/kobold_python
---
+++
@@ -1,3 +1,6 @@
+def project(hash_in, attributes):
+    return {key: value for (key, value) in hash_in.iteritems() if key in attributes}
+
 def merge(default, to_mutate):
     for key, value in default.items():
         to_mutate.setdefault(key, value)
f3d3750986a8710c54c110c43c00fa152dbbd383
src/hades/bin/su.py
src/hades/bin/su.py
import grp import logging import os import pwd import sys from hades.common.cli import ( ArgumentParser, parser as common_parser, setup_cli_logging, ) logger = logging.getLogger(__name__) def drop_privileges(passwd, group): if os.geteuid() != 0: logger.error("Can't drop privileges (EUID != 0)") return os.setgid(group.gr_gid) os.initgroups(passwd.pw_name, group.gr_gid) os.setuid(passwd.pw_uid) def main(): parser = ArgumentParser(parents=[common_parser]) parser.add_argument('user') parser.add_argument('command') parser.add_argument('arguments', nargs='*') args = parser.parse_args() setup_cli_logging(parser.prog, args) try: passwd = pwd.getpwnam(args.user) group = grp.getgrgid(passwd.pw_gid) except KeyError: logger.critical("No such user or group") return os.EX_NOUSER filename = args.command try: drop_privileges(passwd, group) os.execvp(filename, [filename] + args.arguments) except (FileNotFoundError, PermissionError): logger.critical("Could not execute {}".format(filename), file=sys.stderr) return os.EX_NOINPUT except OSError: logger.exception("An OSError occurred") return os.EX_OSERR if __name__ == '__main__': sys.exit(main())
import grp import logging import os import pwd import sys from hades.common.cli import ( ArgumentParser, parser as common_parser, setup_cli_logging, ) logger = logging.getLogger(__name__) def drop_privileges(passwd, group): os.setgid(group.gr_gid) os.initgroups(passwd.pw_name, group.gr_gid) os.setuid(passwd.pw_uid) def main(): parser = ArgumentParser(parents=[common_parser]) parser.add_argument('user') parser.add_argument('command') parser.add_argument('arguments', nargs='*') args = parser.parse_args() setup_cli_logging(parser.prog, args) try: passwd = pwd.getpwnam(args.user) group = grp.getgrgid(passwd.pw_gid) except KeyError: logger.critical("No such user or group") return os.EX_NOUSER filename = args.command try: drop_privileges(passwd, group) except PermissionError: logging.exception("Can't drop privileges") return os.EX_NOPERM try: os.execvp(filename, [filename] + args.arguments) except (FileNotFoundError, PermissionError): logger.critical("Could not execute {}".format(filename), file=sys.stderr) return os.EX_NOINPUT except OSError: logger.exception("An OSError occurred") return os.EX_OSERR if __name__ == '__main__': sys.exit(main())
Abort if privileges can't be dropped
Abort if privileges can't be dropped
Python
mit
agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades
--- +++ @@ -12,9 +12,6 @@ def drop_privileges(passwd, group): - if os.geteuid() != 0: - logger.error("Can't drop privileges (EUID != 0)") - return os.setgid(group.gr_gid) os.initgroups(passwd.pw_name, group.gr_gid) os.setuid(passwd.pw_uid) @@ -36,6 +33,10 @@ filename = args.command try: drop_privileges(passwd, group) + except PermissionError: + logging.exception("Can't drop privileges") + return os.EX_NOPERM + try: os.execvp(filename, [filename] + args.arguments) except (FileNotFoundError, PermissionError): logger.critical("Could not execute {}".format(filename), file=sys.stderr)
258c24c86ebbcc4a4a347e916d520c0f98f82f90
reboot_router_claro3G.py
reboot_router_claro3G.py
#! /usr/bin/env python # -*- coding: utf-8 -*- import urllib2 as http # URL with GET to reboot router or status main page to tests #url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1' url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1' url_root = url_get_status # Handling HTTP Cookie - Session Cookie Router cookieprocessor = http.HTTPCookieProcessor() # Customize it Opener with CookieProcessor opener = http.build_opener(cookieprocessor) # Using here Opener + CookieProcessor http.install_opener(opener) # Open URL with Opener above payload_router = http.urlopen(url_root) # Print payload Request URL print "Payload %s" % payload_router.read()
#! /usr/bin/env python # -*- coding: utf-8 -*- import urllib2 as http # Data Router user_router = "user_here" pass_router = "password_here" ip_router = "IP_here" port_router = "80" # URL with filling the fields above, URL with GET to reboot router or status main page to tests url_get_reboot = "http://" + ip_router + ":" + port_router + "/log/in?un=" + user_router + "&pw=" + pass_router + "&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1" #url_get_status = "http://" + ip_router + ":" + port_router + "/log/in?un=" + user_router + "&pw=" + pass_router + "&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1" # Variable global to open URL url_root = url_get_reboot print "Processing URL: %s" % url_root # Handling HTTP Cookie - Session Cookie Router cookieprocessor = http.HTTPCookieProcessor() # Customize it Opener with CookieProcessor opener = http.build_opener(cookieprocessor) # Using here Opener + CookieProcessor http.install_opener(opener) # Open URL with Opener above payload_router = http.urlopen(url_root) # Print payload Request URL print "Payload %s" % payload_router.read()
Add variables with info access router
Add variables with info access router
Python
apache-2.0
cleitonbueno/reboot_router
--- +++ @@ -3,10 +3,22 @@ import urllib2 as http -# URL with GET to reboot router or status main page to tests -#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1' -url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1' -url_root = url_get_status + +# Data Router +user_router = "user_here" +pass_router = "password_here" +ip_router = "IP_here" +port_router = "80" + + +# URL with filling the fields above, URL with GET to reboot router or status main page to tests +url_get_reboot = "http://" + ip_router + ":" + port_router + "/log/in?un=" + user_router + "&pw=" + pass_router + "&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1" +#url_get_status = "http://" + ip_router + ":" + port_router + "/log/in?un=" + user_router + "&pw=" + pass_router + "&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1" + +# Variable global to open URL +url_root = url_get_reboot + +print "Processing URL: %s" % url_root # Handling HTTP Cookie - Session Cookie Router cookieprocessor = http.HTTPCookieProcessor()
5c7e83a20fe031e586a5a243c642506c9a2dbe9b
epoxy/metaclasses/scalar.py
epoxy/metaclasses/scalar.py
from graphql.core.type import GraphQLScalarType class ScalarMeta(type): def __new__(mcs, name, bases, attrs): if attrs.pop('abstract', False): return super(ScalarMeta, mcs).__new__(mcs, name, bases, attrs) registry = mcs._get_registry() cls = super(ScalarMeta, mcs).__new__(mcs, name, bases, attrs) cls._registry = registry instance = cls() serialize = getattr(instance, 'serialize') parse_literal = getattr(instance, 'parse_literal') parse_value = getattr(instance, 'parse_value') mcs._register(GraphQLScalarType( name=name, description=attrs.get('__doc__', None), serialize=serialize, parse_value=parse_value, parse_literal=parse_literal )) @staticmethod def _register(mutation): raise NotImplementedError('_register must be implemented in the sub-metaclass') @staticmethod def _get_registry(): raise NotImplementedError('_get_registry must be implemented in the sub-metaclass')
from graphql.core.type import GraphQLScalarType class ScalarMeta(type): def __new__(mcs, name, bases, attrs): if attrs.pop('abstract', False): return super(ScalarMeta, mcs).__new__(mcs, name, bases, attrs) registry = mcs._get_registry() cls = super(ScalarMeta, mcs).__new__(mcs, name, bases, attrs) cls._registry = registry instance = cls() serialize = getattr(instance, 'serialize') parse_literal = getattr(instance, 'parse_literal') parse_value = getattr(instance, 'parse_value') mcs._register(GraphQLScalarType( name=name, description=attrs.get('__doc__', None), serialize=serialize, parse_value=parse_value, parse_literal=parse_literal )) @staticmethod def _register(scalar): raise NotImplementedError('_register must be implemented in the sub-metaclass') @staticmethod def _get_registry(): raise NotImplementedError('_get_registry must be implemented in the sub-metaclass')
Rename some copy pasted variable.
Rename some copy pasted variable.
Python
mit
graphql-python/graphql-epoxy
--- +++ @@ -24,7 +24,7 @@ )) @staticmethod - def _register(mutation): + def _register(scalar): raise NotImplementedError('_register must be implemented in the sub-metaclass') @staticmethod
6dc16ffca3fb2b3e95c7399fffd9f48493a8b969
chatterbot/__init__.py
chatterbot/__init__.py
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.8.6' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.8.7' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
Update package version to 0.8.7
Update package version to 0.8.7
Python
bsd-3-clause
vkosuri/ChatterBot,gunthercox/ChatterBot
---
+++
@@ -3,7 +3,7 @@
 """
 from .chatterbot import ChatBot
 
-__version__ = '0.8.6'
+__version__ = '0.8.7'
 __author__ = 'Gunther Cox'
 __email__ = 'gunthercx@gmail.com'
 __url__ = 'https://github.com/gunthercox/ChatterBot'
5e42ab2aa7e5537a995e8d3ca81a29299a077116
examples/01-web/13-wikia.py
examples/01-web/13-wikia.py
# -*- coding: utf-8 *-*
import os, sys, pprint; sys.path.insert(0, os.path.join("..", ".."))

from pattern.web import Wikia, WikiaArticleSet, URLTimeout

# This example retrieves an article from Wikipedia (http://en.wikipedia.org).
# A query requests the article's HTML source from the server, which can be quite slow.
# It is a good idea to cache results from Wikipedia locally,
# and to set a high timeout when calling Wikipedia.search().

domain = 'runescape' # popular wiki
if len(sys.argv) > 1:
    domain = sys.argv[1]

engine = Wikia(language="en",domain=domain)

ArticleSet = WikiaArticleSet( engine, iterationLimit=200 )

counter = 0
try:
    for page in ArticleSet:
        print counter, page.title
except URLTimeout:
    print "Timeout error."
# -*- coding: utf-8 *-*
import os, sys, pprint; sys.path.insert(0, os.path.join("..", ".."))

from pattern.web import Wikia, WikiaArticleSet, URLTimeout

# This example retrieves an article from Wikipedia (http://en.wikipedia.org).
# A query requests the article's HTML source from the server, which can be quite slow.
# It is a good idea to cache results from Wikipedia locally,
# and to set a high timeout when calling Wikipedia.search().

domain = 'runescape' # popular wiki
if len(sys.argv) > 1:
    domain = sys.argv[1]

engine = Wikia(language="en",domain=domain)

ArticleSet = WikiaArticleSet( engine, iterationLimit=200 )

counter = 0
try:
    for page in ArticleSet:
        print counter, page.title
        counter = counter + 1
except URLTimeout:
    print "Timeout error."
Increment counter in wikia example per @satoru
Increment counter in wikia example per @satoru
Python
bsd-3-clause
z0by/pattern,abcht/pattern,bijandhakal/pattern,NTesla/pattern,loretoparisi/pattern,Sri0405/pattern,pombredanne/pattern,shuangsong/pattern,jatinmistry13/pattern,sfprime/pattern,z0by/pattern,arne-cl/pattern,rebeling/pattern,clips/pattern,jatinmistry13/pattern,bijandhakal/pattern,codeaudit/pattern-1,aoom/pattern,ashhher3/pattern,sfprime/pattern,hayd/pattern,shubhangiKishore/pattern,woiddei/pattern,woiddei/pattern,loretoparisi/pattern,aoom/pattern,shubhangiKishore/pattern,dongguangming/pattern,woiddei/pattern,jatinmistry13/pattern,EricSchles/pattern,ashhher3/pattern,pattern3/pattern,sfprime/pattern,clips/pattern,bijandhakal/pattern,arne-cl/pattern,soycode/pattern,elkingtonmcb/pattern,aoom/pattern,shuangsong/pattern,pattern3/pattern,EricSchles/pattern,NTesla/pattern,elkingtonmcb/pattern,abcht/pattern,NTesla/pattern,z0by/pattern,bijandhakal/pattern,EricSchles/pattern,elkingtonmcb/pattern,pombredanne/pattern,hayd/pattern,shuangsong/pattern,hayd/pattern,soycode/pattern,NTesla/pattern,Sri0405/pattern,shubhangiKishore/pattern,elkingtonmcb/pattern,loretoparisi/pattern,shuangsong/pattern,codeaudit/pattern-1,aoom/pattern,RitwikGupta/pattern,z0by/pattern,arne-cl/pattern,woiddei/pattern,abcht/pattern,EricSchles/pattern,Sri0405/pattern,clips/pattern,soycode/pattern,rebeling/pattern,ashhher3/pattern,abcht/pattern,RitwikGupta/pattern,RitwikGupta/pattern,textioHQ/pattern,dongguangming/pattern,dongguangming/pattern,pombredanne/pattern,pattern3/pattern,Sri0405/pattern,loretoparisi/pattern,sfprime/pattern,codeaudit/pattern-1,codeaudit/pattern-1,jatinmistry13/pattern,soycode/pattern,ashhher3/pattern,rebeling/pattern,pombredanne/pattern,arne-cl/pattern,rebeling/pattern,RitwikGupta/pattern,dongguangming/pattern
---
+++
@@ -20,5 +20,6 @@
 try:
     for page in ArticleSet:
         print counter, page.title
+        counter = counter + 1
 except URLTimeout:
     print "Timeout error."
4af368b3d3a4f5cfb8b78e19827c99078fb5ccab
client.py
client.py
#!/usr/bin/env python3 import unittest import http.client url = "localhost:8000" class Client: def test_Connect(self): connected = 0 try: self.conn = http.client.HTTPConnection(url) self.conn.connect() connected = 1 except Exception: print(Exception) return connected def test_Close(self): self.conn.close() return 1 class TestServer(unittest.TestCase): def test_Scenario1(self): cli = Client() for i in range(10): self.assertEqual(cli.test_Connect(), 1) self.assertEqual(cli.test_Close(), 1) if __name__ == "__main__": unittest.main()
#!/usr/bin/env python3 import unittest import http.client url = "localhost:8000" class Client: def test_Connect(self): connected = 0 try: self.conn = http.client.HTTPConnection(url) self.conn.connect() connected = 1 except Exception: print(Exception) return connected def test_RequstIndex(self): res = None self.conn.request("GET", "/") res = self.conn.getresponse() return res def test_Close(self): self.conn.close() return 1 class TestServer(unittest.TestCase): def test_Scenario1(self): cli = Client() for i in range(10): self.assertEqual(cli.test_Connect(), 1) self.assertEqual(cli.test_Close(), 1) def test_Scenario2(self): for i in range(10): cli = Client() self.assertEqual(cli.test_Connect(), 1) res = cli.test_RequstIndex() self.assertIsNotNone(res) self.assertEqual(res.status, 200) self.assertEqual(res.read(22), b"<html><body>Hello!<br>") self.assertEqual(cli.test_Close(), 1) if __name__ == "__main__": unittest.main()
Add request index page test.
Add request index page test.
Python
bsd-3-clause
starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer
--- +++ @@ -16,6 +16,12 @@ print(Exception) return connected + + def test_RequstIndex(self): + res = None + self.conn.request("GET", "/") + res = self.conn.getresponse() + return res def test_Close(self): self.conn.close() @@ -28,5 +34,15 @@ self.assertEqual(cli.test_Connect(), 1) self.assertEqual(cli.test_Close(), 1) + def test_Scenario2(self): + for i in range(10): + cli = Client() + self.assertEqual(cli.test_Connect(), 1) + res = cli.test_RequstIndex() + self.assertIsNotNone(res) + self.assertEqual(res.status, 200) + self.assertEqual(res.read(22), b"<html><body>Hello!<br>") + self.assertEqual(cli.test_Close(), 1) + if __name__ == "__main__": unittest.main()
00a29c535dc699b5bbbc7b6eb9d439d289c8de18
common.py
common.py
import datetime import hashlib import os # hack to override sqlite database filename # see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148 os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite' import scraperwiki def store_history(data, table): """ store a hash of the content with a timestamp so we can work out when the content has changed from inside the scraper """ hash_record = { 'timestamp': datetime.datetime.now(), 'table': table, 'content_hash': hashlib.sha1(data).hexdigest() } scraperwiki.sqlite.save( unique_keys=['timestamp'], data=hash_record, table_name='history') scraperwiki.sqlite.commit_transactions() def truncate(table): scraperwiki.sqlite.execute("DROP TABLE IF EXISTS %s;" % table) scraperwiki.sqlite.commit_transactions() def summarise(table): count = scraperwiki.sqlite.execute( "SELECT COUNT(*) AS count FROM %s;" % table) print("%i %s in database" % (count['data'][0].count, table))
import datetime import hashlib import os # hack to override sqlite database filename # see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148 os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite' import scraperwiki def store_history(data, table): """ store a hash of the content with a timestamp so we can work out when the content has changed from inside the scraper """ hash_record = { 'timestamp': datetime.datetime.now(), 'table': table, 'content_hash': hashlib.sha1(data).hexdigest(), 'raw_data': data, } scraperwiki.sqlite.save( unique_keys=['timestamp'], data=hash_record, table_name='history') scraperwiki.sqlite.commit_transactions() def truncate(table): scraperwiki.sqlite.execute("DROP TABLE IF EXISTS %s;" % table) scraperwiki.sqlite.commit_transactions() def summarise(table): count = scraperwiki.sqlite.execute( "SELECT COUNT(*) AS count FROM %s;" % table) print("%i %s in database" % (count['data'][0].count, table))
Store raw data as well as content hash
Store raw data as well as content hash ..for now. Trying to work out why the content hashes are changing when I am not expecting them to.
Python
mit
wdiv-scrapers/dc-base-scrapers
--- +++ @@ -16,7 +16,8 @@ hash_record = { 'timestamp': datetime.datetime.now(), 'table': table, - 'content_hash': hashlib.sha1(data).hexdigest() + 'content_hash': hashlib.sha1(data).hexdigest(), + 'raw_data': data, } scraperwiki.sqlite.save( unique_keys=['timestamp'],
7e89c3fbd08e4824cc84e46b8bad7289db801866
tests/test_story.py
tests/test_story.py
from py101 import Story from py101 import variables from py101 import lists import unittest class TestStory(unittest.TestCase): def test_name(self): self.assertEqual(Story().name, 'py101', "name should be py101") class TestAdventureVariables(unittest.TestCase): good_solution = """ myinteger = 4 mystring = 'Python String Here' print(myinteger) print(mystring) """ def test_solution(self): test = variables.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown() class TestAdventureLists(unittest.TestCase): good_solution = """ languages = ["ADA", "Pascal", "Fortran", "Smalltalk"] print(languages) """ def test_solution(self): test = lists.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown()
from py101 import Story from py101 import variables from py101 import lists import unittest class TestStory(unittest.TestCase): def test_name(self): self.assertEqual(Story().name, 'py101', "name should be py101") class TestAdventureVariables(unittest.TestCase): good_solution = """ myinteger = 4 mystring = 'Python String Here' print(myinteger) print(mystring) """ def test_solution(self): test = variables.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown() class TestAdventureLists(unittest.TestCase): good_solution = """ languages = ["ADA", "Pascal", "Fortran", "Smalltalk"] print(languages) """ def test_solution(self): test = lists.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown()
Add Tests for the lists adventure
Add Tests for the lists adventure
Python
mit
sophilabs/py101
--- +++ @@ -2,6 +2,7 @@ from py101 import variables from py101 import lists import unittest + class TestStory(unittest.TestCase): def test_name(self):
62da8b8a2774db2ccb725bc0c5a1598252ebf4a7
fuzzer/tasks.py
fuzzer/tasks.py
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) return fuzzer.found_crash()
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
Kill fuzzers when we've found a crash or timed out
Kill fuzzers when we've found a crash or timed out
Python
bsd-2-clause
shellphish/driller
--- +++ @@ -30,4 +30,7 @@ while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) + # make sure to kill the fuzzers when we're done + fuzzer.kill() + return fuzzer.found_crash()
c95dc576153f60c8c56b7b2c5bfac467ccd9dd97
gin/__init__.py
gin/__init__.py
# coding=utf-8 # Copyright 2018 The Gin-Config Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Init file for Gin.""" from gin.config import bind_parameter from gin.config import clear_config from gin.config import config_is_locked from gin.config import config_scope from gin.config import configurable from gin.config import constant from gin.config import current_scope from gin.config import current_scope_str from gin.config import enter_interactive_mode from gin.config import exit_interactive_mode from gin.config import external_configurable from gin.config import finalize from gin.config import operative_config_str from gin.config import parse_config from gin.config import parse_config_file from gin.config import parse_config_files_and_bindings from gin.config import query_parameter from gin.config import REQUIRED from gin.config import unlock_config
# coding=utf-8 # Copyright 2018 The Gin-Config Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Init file for Gin.""" from gin.config import bind_parameter from gin.config import clear_config from gin.config import config_is_locked from gin.config import config_scope from gin.config import configurable from gin.config import constant from gin.config import constants_from_enum from gin.config import current_scope from gin.config import current_scope_str from gin.config import enter_interactive_mode from gin.config import exit_interactive_mode from gin.config import external_configurable from gin.config import finalize from gin.config import operative_config_str from gin.config import parse_config from gin.config import parse_config_file from gin.config import parse_config_files_and_bindings from gin.config import query_parameter from gin.config import REQUIRED from gin.config import unlock_config
Add import for constants_from_enum to be able to use @gin.constants_from_enum
Add import for constants_from_enum to be able to use @gin.constants_from_enum PiperOrigin-RevId: 198401971
Python
apache-2.0
google/gin-config,google/gin-config
--- +++ @@ -21,6 +21,7 @@ from gin.config import config_scope from gin.config import configurable from gin.config import constant +from gin.config import constants_from_enum from gin.config import current_scope from gin.config import current_scope_str from gin.config import enter_interactive_mode
f1359fb6b8117a00afd833765646f03650df6a54
_lib/wordpress_post_processor.py
_lib/wordpress_post_processor.py
import sys import json import requests from string import Template import dateutil.parser def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: resp = requests.get(url, params={'json':1,'page':current_page}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): del post['comments'] # del post['content'] post['_id'] = post['slug'] # remove fields we're not interested in post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']] post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] author_template = Template("$first_name $last_name") post['author'] = [author['title'] for author in post['taxonomy_author']] dt = dateutil.parser.parse(post['date']) dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ') post['date'] = dt_string return post
import sys import json import requests from string import Template import dateutil.parser def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: resp = requests.get(url, params={'json':1,'page':current_page}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): del post['comments'] post['_id'] = post['slug'] # remove fields we're not interested in post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']] post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] author_template = Template("$first_name $last_name") post['author'] = [author['title'] for author in post['taxonomy_author']] dt = dateutil.parser.parse(post['date']) dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ') post['date'] = dt_string return post
Remove commented line we definitely will never need
Remove commented line we definitely will never need
Python
cc0-1.0
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
--- +++ @@ -28,7 +28,6 @@ def process_post(post): del post['comments'] - # del post['content'] post['_id'] = post['slug'] # remove fields we're not interested in post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
6d57372c270d980e0f7d662a60195e54f88b9be5
web/gunicorn.conf.py
web/gunicorn.conf.py
import os
import multiprocessing

proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
import os
import multiprocessing

proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
accesslog = '-'
errorlog = '-'
Make gunicorn log to stdout
Make gunicorn log to stdout
Python
mit
slava-sh/messenger,slava-sh/messenger,slava-sh/messenger,slava-sh/messenger
---
+++
@@ -7,3 +7,5 @@
 bind = '0.0.0.0:80'
 workers = multiprocessing.cpu_count() * 2 + 1
 threads = workers
+accesslog = '-'
+errorlog = '-'
61029c887729032c8c832e64dfb63c444a637931
alfred/__main__.py
alfred/__main__.py
#!/usr/bin/env python import os from argh import arg, ArghParser from argh.exceptions import CommandError from functools import wraps CONFIG = os.environ.get('ALFRED_CONFIG') def with_app(func): @wraps(func) @arg('--config', help='path to config') def wrapper(args): from alfred import create_app if not CONFIG and not args.config: raise CommandError('There is no config file specified') app = create_app(args.config or CONFIG) return func(app, args) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @with_app def runserver(app, args): app.run(args.host, args.port) @with_app def shell(app, args): from alfred.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) @with_app def collectassets(app, args): from alfred.assets import gears gears.get_environment(app).save() def main(): parser = ArghParser() parser.add_commands([runserver, shell, collectassets]) parser.dispatch() if __name__ == '__main__': main()
#!/usr/bin/env python import os from argh import arg, ArghParser from argh.exceptions import CommandError from functools import wraps CONFIG = os.environ.get('ALFRED_CONFIG') def with_app(func): @wraps(func) @arg('--config', help='path to config') def wrapper(args): from alfred import create_app if not CONFIG and not args.config: raise CommandError('There is no config file specified') app = create_app(args.config or CONFIG) return func(app, args) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @arg('--noreload', action='store_true', help='disable code reloader') @with_app def runserver(app, args): app.run(args.host, args.port, use_reloader=not args.noreload) @with_app def shell(app, args): from alfred.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) @with_app def collectassets(app, args): from alfred.assets import gears gears.get_environment(app).save() def main(): parser = ArghParser() parser.add_commands([runserver, shell, collectassets]) parser.dispatch() if __name__ == '__main__': main()
Add an option to disable code reloader to runserver command
Add an option to disable code reloader to runserver command
Python
isc
alfredhq/alfred,alfredhq/alfred
--- +++ @@ -23,9 +23,10 @@ @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') +@arg('--noreload', action='store_true', help='disable code reloader') @with_app def runserver(app, args): - app.run(args.host, args.port) + app.run(args.host, args.port, use_reloader=not args.noreload) @with_app
5493f78cd9225dc4a46689f09f75af59dfea9e16
shortuuidfield/__init__.py
shortuuidfield/__init__.py
try: VERSION = __import__('pkg_resources') \ .get_distribution('django-shortuuidfield').version except Exception, e: VERSION = 'unknown' from fields import ShortUUIDField
try: VERSION = __import__('pkg_resources') \ .get_distribution('django-shortuuidfield').version except Exception: VERSION = 'unknown' from fields import ShortUUIDField
Remove old-style assignment of raised exception
Remove old-style assignment of raised exception I've removed the old-style assignment of the ``Exception`` because it is no longer supported in Python 3.x. Since it is not used, I've removed it completely.
Python
bsd-3-clause
mriveralee/django-shortuuidfield,nebstrebor/django-shortuuidfield
--- +++ @@ -1,7 +1,7 @@ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-shortuuidfield').version -except Exception, e: +except Exception: VERSION = 'unknown' - + from fields import ShortUUIDField
bc3063a36655f436257beffd2d8815827f601a71
ui/widgets/Style.py
ui/widgets/Style.py
""" Style Contains convenience functions for styling widgets. :Authors: Berend Klein Haneveld """ import sys def styleWidgetForTab(widget): """ This function style a widget that can be used inside a QScrollArea that is inside a QTabWidget. On OS X the background color inside a tab widget is slightly darker than the default, so it has to be styled otherwise it would stand out. There is a bug in Qt where QComboBox will not render properly on OS X when the background style of a parent is adjusted. In order to solve this, the background style of such a widget should only be set for that object, so by naming it and setting the style only for objects with that name the bug can be worked around. Use this function whenever a (container) widget is needed inside a QScrollArea in a QTabWidget. :type widget: QWidget """ if sys.platform.startswith("darwin"): widget.setObjectName("tabWidget") widget.setStyleSheet("#tabWidget {background: rgb(229, 229, 229);}")
""" Style Contains convenience functions for styling widgets. :Authors: Berend Klein Haneveld """ import sys def styleWidgetForTab(widget): """ This function style a widget that can be used inside a QScrollArea that is inside a QTabWidget. On OS X the background color inside a tab widget is slightly darker than the default, so it has to be styled otherwise it would stand out. There is a bug in Qt where QComboBox will not render properly on OS X when the background style of a parent is adjusted. In order to solve this, the background style of such a widget should only be set for that object, so by naming it and setting the style only for objects with that name the bug can be worked around. Use this function whenever a (container) widget is needed inside a QScrollArea in a QTabWidget. :type widget: QWidget """ if sys.platform.startswith("darwin"): widget.setObjectName("tabWidget") widget.setStyleSheet("#tabWidget {background: rgb(229, 229, 229);}") elif sys.platform.startswith("linux"): # This makes it look pretty on Elementary theme widget.setObjectName("tabWidget") widget.setStyleSheet("#tabWidget {background: rgb(236, 236, 236);}")
Make tab widgets look good on ElementaryOS.
Make tab widgets look good on ElementaryOS. Any other linux distribution will probably look bad because of this. In the future a better solution must be found to styling the background of a widget inside a scrollarea inside a tabwidget.
Python
mit
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
--- +++ @@ -22,7 +22,7 @@ should only be set for that object, so by naming it and setting the style only for objects with that name the bug can be worked around. - + Use this function whenever a (container) widget is needed inside a QScrollArea in a QTabWidget. :type widget: QWidget @@ -30,3 +30,7 @@ if sys.platform.startswith("darwin"): widget.setObjectName("tabWidget") widget.setStyleSheet("#tabWidget {background: rgb(229, 229, 229);}") + elif sys.platform.startswith("linux"): + # This makes it look pretty on Elementary theme + widget.setObjectName("tabWidget") + widget.setStyleSheet("#tabWidget {background: rgb(236, 236, 236);}")
c6e48c224b48e90c57d7731fc88be7703990a02a
app/chess/piece.py
app/chess/piece.py
class ChessPiece(object): def __init__(self): self.column = 0 self.row = 0 self.symbol = '' # Checks piece can attack the specified position def can_attack_position(self, column, row): pass # return the character representation of this chess piece def get_symbol(self): return self.symbol def set_column(self, column): self.column = column def get_column(self): return self.column def set_row(self, row): self.row = row def get_row(self): return self.row class King(ChessPiece): def __init__(self): self.symbol = 'K' print '>>> Buil king piece' def can_attack_position(self, column, row): return True class Queen(ChessPiece): def __init__(self): self.symbol = 'Q' print '>>> Buil Queen piece' def can_attack_position(self, column, row): return True
from math import hypot class ChessPiece(object): def __init__(self): self.x = 0 self.y = 0 self.symbol = '' # Checks piece can attack the specified position def deplace_piece(self, square): self.x = square.x self.y = square.y # return the character representation of this chess piece def get_symbol(self): return self.symbol def set_column(self, column): self.x = column def get_column(self): return self.x def set_row(self, row): self.y = row def get_row(self): return self.y def pos(self): return(self.x, self.y) def check_attack(self, p): return None class King(ChessPiece): def __init__(self): ChessPiece.__init__(self) self.symbol = 'K' print '>>> Buil king piece' def check_attack(self, pos): dist = hypot(self.x - pos.x, self.y - pos.y) if dist <= 1: return True else: return False class Queen(ChessPiece): def __init__(self): ChessPiece.__init__(self) self.symbol = 'Q' print '>>> Buil Queen piece' def check_attack(self, pos): return True
Add attack function to KING class
Add attack function to KING class
Python
mit
aymguesmi/ChessChallenge
--- +++ @@ -1,49 +1,64 @@ +from math import hypot + + class ChessPiece(object): def __init__(self): - self.column = 0 - self.row = 0 + self.x = 0 + self.y = 0 self.symbol = '' # Checks piece can attack the specified position - def can_attack_position(self, column, row): - pass + def deplace_piece(self, square): + self.x = square.x + self.y = square.y # return the character representation of this chess piece def get_symbol(self): return self.symbol def set_column(self, column): - self.column = column + self.x = column def get_column(self): - return self.column + return self.x def set_row(self, row): - self.row = row + self.y = row def get_row(self): - return self.row + return self.y + + def pos(self): + return(self.x, self.y) + + def check_attack(self, p): + return None class King(ChessPiece): def __init__(self): + ChessPiece.__init__(self) self.symbol = 'K' print '>>> Buil king piece' - def can_attack_position(self, column, row): - return True + def check_attack(self, pos): + dist = hypot(self.x - pos.x, self.y - pos.y) + if dist <= 1: + return True + else: + return False class Queen(ChessPiece): def __init__(self): + ChessPiece.__init__(self) self.symbol = 'Q' print '>>> Buil Queen piece' - def can_attack_position(self, column, row): + def check_attack(self, pos): return True -
0943079243e918496bc2cc5a764025bd9dd4f0db
groundstation/stream_client.py
groundstation/stream_client.py
from sockets.stream_socket import StreamSocket from transfer.request import Request from transfer.notification import Notification import settings import groundstation.logger log = groundstation.logger.getLogger(__name__) class StreamClient(StreamSocket): def __init__(self, addr): super(StreamClient, self).__init__() # TODO Pretty sure this should be a struct sockaddr self.peer = addr self.socket.connect((addr, settings.PORT)) self.socket.setblocking(False) def begin_handshake(self, station): request = Request("LISTALLOBJECTS", station=station, stream=self) station.register_request(request) self.enqueue(request) def notify_new_object(self, station, obj): notification = Notification("NEWOBJECT", station=station, stream=self, payload=obj) self.enqueue(notification)
from sockets.stream_socket import StreamSocket from transfer.request import Request from transfer.notification import Notification import settings from groundstation.utils import path2id import groundstation.logger log = groundstation.logger.getLogger(__name__) class StreamClient(StreamSocket): def __init__(self, addr): super(StreamClient, self).__init__() # TODO Pretty sure this should be a struct sockaddr self.peer = addr self.socket.connect((addr, settings.PORT)) self.socket.setblocking(False) def begin_handshake(self, station): request = Request("LISTALLOBJECTS", station=station, stream=self) station.register_request(request) self.enqueue(request) def notify_new_object(self, station, path): obj = path2id(path) notification = Notification("NEWOBJECT", station=station, stream=self, payload=obj) self.enqueue(notification)
Convert paths to objectnames before trying to use them
Convert paths to objectnames before trying to use them
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
--- +++ @@ -2,6 +2,8 @@ from transfer.request import Request from transfer.notification import Notification import settings + +from groundstation.utils import path2id import groundstation.logger log = groundstation.logger.getLogger(__name__) @@ -20,6 +22,7 @@ station.register_request(request) self.enqueue(request) - def notify_new_object(self, station, obj): + def notify_new_object(self, station, path): + obj = path2id(path) notification = Notification("NEWOBJECT", station=station, stream=self, payload=obj) self.enqueue(notification)
e2132caf1c677b34eddd679e23983022ec12b5df
watermarker/conf.py
watermarker/conf.py
# -*- coding: utf-8 -*- import warnings from django.conf import settings # pylint: disable=W0611 from appconf import AppConf class WatermarkSettings(AppConf): QUALITY = 85 OBSCURE_ORIGINAL = True RANDOM_POSITION_ONCE = True WATERMARK_PERCENTAGE = 30 class Meta: prefix = 'watermark' holder = 'watermarker.conf.settings' def configure_quality(self, value): if getattr(settings, 'WATERMARKING_QUALITY', None): warnings.warn("WATERMARKING_QUALITY is deprecated, use WATERMARK_QUALITY", DeprecationWarning) return value
# -*- coding: utf-8 -*- import warnings from django.conf import settings # pylint: disable=W0611 from appconf import AppConf class WatermarkSettings(AppConf): QUALITY = 85 OBSCURE_ORIGINAL = True RANDOM_POSITION_ONCE = True WATERMARK_PERCENTAGE = getattr(settings, 'WATERMARK_PERCENTAGE', 30) class Meta: prefix = 'watermark' holder = 'watermarker.conf.settings' def configure_quality(self, value): if getattr(settings, 'WATERMARKING_QUALITY', None): warnings.warn("WATERMARKING_QUALITY is deprecated, use WATERMARK_QUALITY", DeprecationWarning) return value
Change AppConf class to use settings defined value or default.
Change AppConf class to use settings defined value or default.
Python
bsd-3-clause
lzanuz/django-watermark,lzanuz/django-watermark
--- +++ @@ -11,7 +11,7 @@ QUALITY = 85 OBSCURE_ORIGINAL = True RANDOM_POSITION_ONCE = True - WATERMARK_PERCENTAGE = 30 + WATERMARK_PERCENTAGE = getattr(settings, 'WATERMARK_PERCENTAGE', 30) class Meta: prefix = 'watermark'
13291e4862ef48a3de3615e8eef5704c6bfff628
importlib_metadata/__init__.py
importlib_metadata/__init__.py
import os import sys import glob class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): for path_item in path: glob_spec = os.path.join(path_item, f'{name}-*.dist-info') match = next(glob.iglob(glob_spec)) return cls(os.path.join(path_item, match)) @classmethod def for_module(cls, mod): return cls.for_name(cls.dist_name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__)
import os import sys import glob import email import itertools import contextlib class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): for path_item in path: glob_specs = ( os.path.join(path_item, f'{name}-*.*-info'), os.path.join(path_item, f'{name}.*-info'), ) globs = itertools.chain.from_iterable(map(glob.iglob, glob_specs)) match = next(globs) return cls(os.path.join(path_item, match)) @classmethod def for_module(cls, mod): return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) @property def metadata(self): return email.message_from_string( self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') ) def load_metadata(self, name): fn = os.path.join(self.path, name) with contextlib.suppress(FileNotFoundError): with open(fn, encoding='utf-8') as strm: return strm.read() @property def version(self): return self.metadata['Version']
Implement metadata loading and version retrieval
Implement metadata loading and version retrieval
Python
apache-2.0
python/importlib_metadata
--- +++ @@ -1,6 +1,9 @@ import os import sys import glob +import email +import itertools +import contextlib class Distribution: @@ -13,14 +16,34 @@ @classmethod def for_name(cls, name, path=sys.path): for path_item in path: - glob_spec = os.path.join(path_item, f'{name}-*.dist-info') - match = next(glob.iglob(glob_spec)) + glob_specs = ( + os.path.join(path_item, f'{name}-*.*-info'), + os.path.join(path_item, f'{name}.*-info'), + ) + globs = itertools.chain.from_iterable(map(glob.iglob, glob_specs)) + match = next(globs) return cls(os.path.join(path_item, match)) @classmethod def for_module(cls, mod): - return cls.for_name(cls.dist_name_for_module(mod)) + return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) + + @property + def metadata(self): + return email.message_from_string( + self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') + ) + + def load_metadata(self, name): + fn = os.path.join(self.path, name) + with contextlib.suppress(FileNotFoundError): + with open(fn, encoding='utf-8') as strm: + return strm.read() + + @property + def version(self): + return self.metadata['Version']
48543d559d13bb9446f455d14ec3e8ae1ff4f2d7
angular_flask/__init__.py
angular_flask/__init__.py
import os
from flask import Flask
from flask_sslify import SSLify


app = Flask(__name__, instance_path='/instance')

if 'DYNO' in os.environ:
    sslify = SSLify(app)
app.config.from_object('config')
app.config.from_pyfile('config.py')

import angular_flask.core
import angular_flask.models
import angular_flask.controllers
import os
from flask import Flask
from flask_sslify import SSLify


app = Flask(__name__, instance_relative_config=True)

if 'DYNO' in os.environ:
    sslify = SSLify(app)
app.config.from_object('config')
app.config.from_pyfile('config.py', True)

import angular_flask.core
import angular_flask.models
import angular_flask.controllers
Set tru to config from pyfile
Set tru to config from pyfile
Python
mit
Clarity-89/blog,Clarity-89/blog,Clarity-89/blog
---
+++
@@ -3,12 +3,12 @@
 from flask_sslify import SSLify
 
 
-app = Flask(__name__, instance_path='/instance')
+app = Flask(__name__, instance_relative_config=True)
 
 if 'DYNO' in os.environ:
     sslify = SSLify(app)
 app.config.from_object('config')
-app.config.from_pyfile('config.py')
+app.config.from_pyfile('config.py', True)
 
 import angular_flask.core
 import angular_flask.models
dcba06dfc3ae1e558c3b7926780b0934b7ac3fda
trackpy/tests/test_misc.py
trackpy/tests/test_misc.py
from __future__ import (absolute_import, division, print_function, unicode_literals) import six import os import unittest import warnings import pims import trackpy import trackpy.diag path, _ = os.path.split(os.path.abspath(__file__)) class DiagTests(unittest.TestCase): def test_performance_report(self): trackpy.diag.performance_report() def test_dependencies(self): trackpy.diag.dependencies() class APITests(unittest.TestCase): def test_pims_deprecation(self): """Using a pims class should work, but generate a warning. The inclusion of these classes (and therefore this test) in trackpy is deprecated as of v0.3 and will be removed in a future version.""" with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always', UserWarning) imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png')) assert isinstance(imseq, pims.ImageSequence) if len(w) != 1: print('Caught warnings:') for wrn in w: print(wrn, wrn.message) assert len(w) == 1
from __future__ import (absolute_import, division, print_function, unicode_literals) import six import os import unittest import warnings import pims import trackpy import trackpy.diag path, _ = os.path.split(os.path.abspath(__file__)) class DiagTests(unittest.TestCase): def test_performance_report(self): trackpy.diag.performance_report() def test_dependencies(self): trackpy.diag.dependencies() class APITests(unittest.TestCase): def test_pims_deprecation(self): """Using a pims class should work, but generate a warning. The inclusion of these classes (and therefore this test) in trackpy is deprecated as of v0.3 and will be removed in a future version.""" with warnings.catch_warnings(record=True) as w: warnings.simplefilter('ignore') warnings.simplefilter('always', UserWarning) imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png')) assert isinstance(imseq, pims.ImageSequence) if len(w) != 1: print('Caught warnings:') for wrn in w: print(wrn, wrn.message) assert len(w) == 1
Fix pims warning test under Py3
TST: Fix pims warning test under Py3
Python
bsd-3-clause
daniorerio/trackpy,daniorerio/trackpy
--- +++ @@ -27,6 +27,7 @@ trackpy is deprecated as of v0.3 and will be removed in a future version.""" with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('ignore') warnings.simplefilter('always', UserWarning) imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png')) assert isinstance(imseq, pims.ImageSequence)
bebc2a499a4190c8c3090bcab0203b913aa7592d
events/auth.py
events/auth.py
from django.contrib.auth.models import AnonymousUser from rest_framework import authentication from rest_framework import exceptions from events.models import DataSource from django.utils.translation import ugettext_lazy as _ class ApiKeyAuthentication(authentication.BaseAuthentication): def authenticate(self, request): api_key = request.META.get('apikey') if not api_key: return None data_source = self.get_data_source(api_key=api_key) return ApiKeyUser(), ApiKeyAuth(data_source) def authenticate_header(self, request): """ Return a string to be used as the value of the `WWW-Authenticate` header in a `401 Unauthenticated` response, or `None` if the authentication scheme should return `403 Permission Denied` responses. """ return "Api key authentication failed." @staticmethod def get_data_source(api_key): try: data_source = DataSource.objects.get(api_key=api_key) except DataSource.DoesNotExist: raise exceptions.AuthenticationFailed(_( "Provided API key does not match any organization on record. " "Please contact the API support staff to obtain a valid API key " "and organization identifier for POSTing your events.")) return data_source class ApiKeyUser(AnonymousUser): def is_authenticated(self): return True class ApiKeyAuth(object): def __init__(self, data_source): self.data_source = data_source def get_authenticated_data_source(self): return self.data_source
from django.contrib.auth.models import AnonymousUser from rest_framework import authentication from rest_framework import exceptions from events.models import DataSource from django.utils.translation import ugettext_lazy as _ class ApiKeyAuthentication(authentication.BaseAuthentication): def authenticate(self, request): # django converts 'apikey' to 'HTTP_APIKEY' outside runserver api_key = request.META.get('apikey') or request.META.get('HTTP_APIKEY') if not api_key: return None data_source = self.get_data_source(api_key=api_key) return ApiKeyUser(), ApiKeyAuth(data_source) def authenticate_header(self, request): """ Return a string to be used as the value of the `WWW-Authenticate` header in a `401 Unauthenticated` response, or `None` if the authentication scheme should return `403 Permission Denied` responses. """ return "Api key authentication failed." @staticmethod def get_data_source(api_key): try: data_source = DataSource.objects.get(api_key=api_key) except DataSource.DoesNotExist: raise exceptions.AuthenticationFailed(_( "Provided API key does not match any organization on record. " "Please contact the API support staff to obtain a valid API key " "and organization identifier for POSTing your events.")) return data_source class ApiKeyUser(AnonymousUser): def is_authenticated(self): return True class ApiKeyAuth(object): def __init__(self, data_source): self.data_source = data_source def get_authenticated_data_source(self): return self.data_source
Fix checking apikey outside runserver
Fix checking apikey outside runserver
Python
mit
City-of-Helsinki/linkedevents,aapris/linkedevents,aapris/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
--- +++ @@ -7,7 +7,8 @@ class ApiKeyAuthentication(authentication.BaseAuthentication): def authenticate(self, request): - api_key = request.META.get('apikey') + # django converts 'apikey' to 'HTTP_APIKEY' outside runserver + api_key = request.META.get('apikey') or request.META.get('HTTP_APIKEY') if not api_key: return None data_source = self.get_data_source(api_key=api_key)
b0a275e2430a04bf4e47b823f48cade92c407673
apiserver/worker/grab_config.py
apiserver/worker/grab_config.py
""" Grab worker configuration from GCloud instance attributes. """ import json import requests MANAGER_URL_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-manager-url" SECRET_FOLDER_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-secret-folder" GPU_CAPABILITY_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-gpu" MANAGER_URL = requests.get(MANAGER_URL_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text SECRET_FOLDER = requests.get(SECRET_FOLDER_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text HAS_GPU = requests.get(SECRET_FOLDER_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text == "true" with open("config.json", "w") as configfile: json.dump({ "MANAGER_URL": MANAGER_URL, "SECRET_FOLDER": SECRET_FOLDER, "CAPABILITIES": ["gpu"] if HAS_GPU else [], }, configfile)
""" Grab worker configuration from GCloud instance attributes. """ import json import requests MANAGER_URL_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-manager-url" SECRET_FOLDER_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-secret-folder" GPU_CAPABILITY_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/halite-gpu" MANAGER_URL = requests.get(MANAGER_URL_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text SECRET_FOLDER = requests.get(SECRET_FOLDER_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text HAS_GPU = requests.get(GPU_CAPABILITY_METADATA_URL, headers={ "Metadata-Flavor": "Google" }).text == "true" with open("config.json", "w") as configfile: json.dump({ "MANAGER_URL": MANAGER_URL, "SECRET_FOLDER": SECRET_FOLDER, "CAPABILITIES": ["gpu"] if HAS_GPU else [], }, configfile)
Fix typo in GPU worker config setup
Fix typo in GPU worker config setup
Python
mit
HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II
--- +++ @@ -15,7 +15,7 @@
 SECRET_FOLDER = requests.get(SECRET_FOLDER_METADATA_URL, headers={
     "Metadata-Flavor": "Google"
 }).text
-HAS_GPU = requests.get(SECRET_FOLDER_METADATA_URL, headers={
+HAS_GPU = requests.get(GPU_CAPABILITY_METADATA_URL, headers={
     "Metadata-Flavor": "Google"
 }).text == "true"
 
7b520e973ed9a72cc3b68bda0a48c89b6d60558b
examples/connect4_uci_outcomes.py
examples/connect4_uci_outcomes.py
from __future__ import division, print_function
from collections import Counter
from capstone.util.c4uci import load_instance

FILENAME = 'datasets/connect-4.data'

outcomes = []

with open(FILENAME) as f:
    for i, line in enumerate(f, 1):
        _, outcome = load_instance(line)
        outcomes.append(outcome)
        if i % 1000 == 0:
            print(i)

counter = Counter(outcomes)
print('\n---------')
print(' Results')
print('---------\n')
print('total: {}'.format(len(outcomes)))
for outcome in ['win', 'loss', 'draw']:
    print('{outcome}: {count} ({pct:.2f}%)'.format(
        outcome=outcome,
        count=counter[outcome],
        pct=((counter[outcome] / len(outcomes)) * 100)
    ))
from __future__ import division, print_function
import pandas as pd
from sklearn.linear_model import LinearRegression
from capstone.game import Connect4 as C4
from capstone.util import print_header

FILENAME = 'datasets/connect-4.data'

def column_name(i):
    if i == 42:
        return 'outcome'
    row = chr(ord('a') + (i // C4.ROWS))
    col = (i % C4.ROWS) + 1
    return '{row}{col}'.format(row=row, col=col)

column_names = [column_name(i) for i in range(43)]
df = pd.read_csv(FILENAME, header=None, names=column_names)
outcomes = df.loc[:, 'outcome']

print_header('Dataset')
print(df, end='\n\n')

print_header('Number of instances')
print(df.shape[0], end='\n\n')

print_header('Outcomes')
print(outcomes.value_counts(), end='\n\n')

print_header('Normalized Outcomes')
print(outcomes.value_counts(normalize=True))
Use pandas dataframes for UCI C4 dataset
Use pandas dataframes for UCI C4 dataset
Python
mit
davidrobles/mlnd-capstone-code
--- +++ @@ -1,26 +1,30 @@
 from __future__ import division, print_function
-from collections import Counter
-from capstone.util.c4uci import load_instance
+import pandas as pd
+from sklearn.linear_model import LinearRegression
+from capstone.game import Connect4 as C4
+from capstone.util import print_header
 
 FILENAME = 'datasets/connect-4.data'
 
-outcomes = []
+def column_name(i):
+    if i == 42:
+        return 'outcome'
+    row = chr(ord('a') + (i // C4.ROWS))
+    col = (i % C4.ROWS) + 1
+    return '{row}{col}'.format(row=row, col=col)
 
-with open(FILENAME) as f:
-    for i, line in enumerate(f, 1):
-        _, outcome = load_instance(line)
-        outcomes.append(outcome)
-        if i % 1000 == 0:
-            print(i)
+column_names = [column_name(i) for i in range(43)]
+df = pd.read_csv(FILENAME, header=None, names=column_names)
+outcomes = df.loc[:, 'outcome']
 
-counter = Counter(outcomes)
-print('\n---------')
-print(' Results')
-print('---------\n')
-print('total: {}'.format(len(outcomes)))
-for outcome in ['win', 'loss', 'draw']:
-    print('{outcome}: {count} ({pct:.2f}%)'.format(
-        outcome=outcome,
-        count=counter[outcome],
-        pct=((counter[outcome] / len(outcomes)) * 100)
-    ))
+print_header('Dataset')
+print(df, end='\n\n')
+
+print_header('Number of instances')
+print(df.shape[0], end='\n\n')
+
+print_header('Outcomes')
+print(outcomes.value_counts(), end='\n\n')
+
+print_header('Normalized Outcomes')
+print(outcomes.value_counts(normalize=True))
7eb8466484043dea127c168f41027aea9ff7679d
examples/generate-manager-file.py
examples/generate-manager-file.py
#!/usr/bin/python

import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
    CONN_MGR_PARAM_FLAG_REGISTER

if len(sys.argv) >= 2:
    manager_name = sys.argv[1]
else:
    manager_name = "haze"

service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]

print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
    print "[Protocol %s]" % protocol
    for param in manager.GetParameters(protocol):
        (name, flags, type, default) = param
        print "param-%s=%s" % (name, type),

        if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
            print "required",
        if flags & CONN_MGR_PARAM_FLAG_REGISTER:
            print "register",
        print
    print
#!/usr/bin/python

import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
    CONN_MGR_PARAM_FLAG_REGISTER

if len(sys.argv) >= 2:
    manager_name = sys.argv[1]
else:
    manager_name = "haze"

service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]

print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
    defaults = []
    print "[Protocol %s]" % protocol
    for param in manager.GetParameters(protocol):
        (name, flags, type, default) = param
        print "param-%s=%s" % (name, type),

        if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
            print "required",
        if flags & CONN_MGR_PARAM_FLAG_REGISTER:
            print "register",
        print

        if default != "": # FIXME: is there a better way to check if a default
                          # exists?
            defaults.append( (name, type, default) )
    for default in defaults:
        if default[1] == "b":
            if default[2]:
                value = "true"
            else:
                value = "false"
        else:
            value = str(default[2])
        print "default-%s=%s" % (default[0], value)
    print
Include default values in generated .manager files
Include default values in generated .manager files 20070911135919-4210b-773464bb139c29ab8da7ee85334bf2907e5d6e3c.gz
Python
lgpl-2.1
freedesktop-unofficial-mirror/telepathy__telepathy-python,detrout/telepathy-python,epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,epage/telepathy-python,PabloCastellano/telepathy-python,PabloCastellano/telepathy-python,max-posedon/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python
--- +++ @@ -23,6 +23,7 @@
 protocols = manager.ListProtocols()
 protocols.sort()
 for protocol in protocols:
+    defaults = []
     print "[Protocol %s]" % protocol
     for param in manager.GetParameters(protocol):
         (name, flags, type, default) = param
@@ -33,4 +34,17 @@
         if flags & CONN_MGR_PARAM_FLAG_REGISTER:
             print "register",
         print
+
+        if default != "": # FIXME: is there a better way to check if a default
+                          # exists?
+            defaults.append( (name, type, default) )
+    for default in defaults:
+        if default[1] == "b":
+            if default[2]:
+                value = "true"
+            else:
+                value = "false"
+        else:
+            value = str(default[2])
+        print "default-%s=%s" % (default[0], value)
     print
6ddba0706a377874978da064b47bb6245e362f5b
floyd/floyd.py
floyd/floyd.py
#!-*- coding:utf-8 -*-
"""
Floyd's cycle-finding algorithm
http://en.wikipedia.org/wiki/Cycle_detection
http://www.siafoo.net/algorithm/10
"""

def floyd(top):
    """
    >>> floyd([1,2,3,4])
    False
    >>> floyd([1,2,1,2,1])
    True
    >>> floyd([1,2,3,1,2,3,1])
    True
    >>> floyd([1,2,3,1,2,3,1,2,3,1])
    True
    >>> floyd(["A","B","A","B","A"])
    True
    """
    tortoise = top
    hare = top
    while True:
        # Is Hare at End?
        if not hare[1:]:
            return False # NO LOOP
        hare = hare[1:] # Increment hare
        # Is Hare at End?
        if not hare[1:]:
            return False # NO LOOP
        hare = hare[1:] # Increment Hare Again
        tortoise = tortoise[1:]
        # Did Hare Meet Tortoise?
        if hare[0] == tortoise[0]:
            return True # LOOP!

if __name__ == "__main__":
    import doctest
    doctest.testmod()
# -*- coding:utf-8 -*-

"""
Floyd's cycle-finding algorithm
http://en.wikipedia.org/wiki/Cycle_detection
http://www.siafoo.net/algorithm/10
"""

def floyd(top):
    """
    >>> floyd([1,2,3,4])
    False
    >>> floyd([1,2,1,2,1])
    True
    >>> floyd([1,2,3,1,2,3,1])
    True
    >>> floyd([1,2,3,1,2,3,1,2,3,1])
    True
    >>> floyd(["A","B","A","B","A"])
    True
    """
    tortoise = top
    hare = top
    while True:
        # Is Hare at End?
        if not hare[1:]:
            return False # NO LOOP
        hare = hare[1:] # Increment hare
        # Is Hare at End?
        if not hare[1:]:
            return False # NO LOOP
        hare = hare[1:] # Increment Hare Again
        tortoise = tortoise[1:]
        # Did Hare Meet Tortoise?
        if hare[0] == tortoise[0]:
            return True # LOOP!

if __name__ == "__main__":
    import doctest
    doctest.testmod()
Implement Cantor Pairing Function
Implement Cantor Pairing Function
Python
mit
rokujyouhitoma/tips,rokujyouhitoma/tips,rokujyouhitoma/tips,rokujyouhitoma/tips
--- +++ @@ -1,4 +1,5 @@
-#!-*- coding:utf-8 -*-
+# -*- coding:utf-8 -*-
+
 """
 Floyd's cycle-finding algorithm
 http://en.wikipedia.org/wiki/Cycle_detection
655e741375b3fad7e3b7657662d33ca8017c0220
test/requests/link_checker.py
test/requests/link_checker.py
import requests

def check_links(args_obj, parser):
    print("")
    print("Checking links")
    print("########################")
    print("Not implemented yet.")
    print("This is supposed to check all links in the system.")
    print("########################")
from __future__ import print_function
import re
import requests
from lxml.html import parse
from requests.exceptions import ConnectionError

def is_root_link(link):
    pattern = re.compile("^/$")
    return pattern.match(link)

def is_mailto_link(link):
    pattern = re.compile("^mailto:.*")
    return pattern.match(link)

def is_internal_link(link):
    pattern = re.compile("^/.*")
    return pattern.match(link)

def get_links(doc):
    return filter(
        lambda x: not (
            is_root_link(x)
            or is_mailto_link(x))
        , map(lambda y: y.get("href")
              , doc.cssselect("a")))

def verify_link(link):
    try:
        result = requests.get(link, timeout=20)
        if result.status_code == 200:
            print(link+" ==> OK")
        else:
            print("ERROR: link `"+link+"` failed with status "
                  , result.status_code)
    except ConnectionError as ex:
        print("ERROR: ", link, ex)

def check_page(host, start_url):
    print("")
    print("Checking links in page `"+start_url+"`")
    doc = parse(start_url).getroot()
    links = get_links(doc)
    internal_links = filter(is_internal_link, links)
    external_links = filter(lambda x: not is_internal_link(x), links)
    external_links.append("http://somenon-existentsite.brr")
    for link in internal_links:
        verify_link(host+link)

    for link in external_links:
        verify_link(link)

def check_links(args_obj, parser):
    print("")
    print("Checking links")
    host = args_obj.host

    # Check the home page
    check_page(host, host)

    # Check traits page
    check_page(
        host,
        host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
Add tests to check links.
Add tests to check links.
Python
agpl-3.0
zsloan/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2
--- +++ @@ -1,9 +1,63 @@
+from __future__ import print_function
+import re
 import requests
+from lxml.html import parse
+from requests.exceptions import ConnectionError
+
+def is_root_link(link):
+    pattern = re.compile("^/$")
+    return pattern.match(link)
+
+def is_mailto_link(link):
+    pattern = re.compile("^mailto:.*")
+    return pattern.match(link)
+
+def is_internal_link(link):
+    pattern = re.compile("^/.*")
+    return pattern.match(link)
+
+def get_links(doc):
+    return filter(
+        lambda x: not (
+            is_root_link(x)
+            or is_mailto_link(x))
+        , map(lambda y: y.get("href")
+              , doc.cssselect("a")))
+
+def verify_link(link):
+    try:
+        result = requests.get(link, timeout=20)
+        if result.status_code == 200:
+            print(link+" ==> OK")
+        else:
+            print("ERROR: link `"+link+"` failed with status "
+                  , result.status_code)
+    except ConnectionError as ex:
+        print("ERROR: ", link, ex)
+
+def check_page(host, start_url):
+    print("")
+    print("Checking links in page `"+start_url+"`")
+    doc = parse(start_url).getroot()
+    links = get_links(doc)
+    internal_links = filter(is_internal_link, links)
+    external_links = filter(lambda x: not is_internal_link(x), links)
+    external_links.append("http://somenon-existentsite.brr")
+    for link in internal_links:
+        verify_link(host+link)
+
+    for link in external_links:
+        verify_link(link)
 
 def check_links(args_obj, parser):
     print("")
     print("Checking links")
-    print("########################")
-    print("Not implemented yet.")
-    print("This is supposed to check all links in the system.")
-    print("########################")
+    host = args_obj.host
+
+    # Check the home page
+    check_page(host, host)
+
+    # Check traits page
+    check_page(
+        host,
+        host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
2bb9eab65b56f012076ed6f122cda4d58b576eb7
gather/main.py
gather/main.py
#!/usr/bin/env python3
import logging
import json

from gather.gatherbot import GatherBot
from gather import commands

def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)', commands.add)
    bot.register_action('^!(?:remove|so)', commands.remove)
    bot.run(config['token'])


if __name__ == '__main__':
    main()
#!/usr/bin/env python3
import logging
import json

from gather.gatherbot import GatherBot
from gather import commands

def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)$', commands.add)
    bot.register_action('^!(?:remove|so)$', commands.remove)
    bot.run(config['token'])


if __name__ == '__main__':
    main()
Fix regexes so that so doesn't match s
Fix regexes so that so doesn't match s
Python
mit
veryhappythings/discord-gather
--- +++ @@ -16,8 +16,8 @@
 
     bot = GatherBot()
     bot.register_action('^!help', commands.bot_help)
-    bot.register_action('^!(?:add|s)', commands.add)
-    bot.register_action('^!(?:remove|so)', commands.remove)
+    bot.register_action('^!(?:add|s)$', commands.add)
+    bot.register_action('^!(?:remove|so)$', commands.remove)
     bot.run(config['token'])
 
1e8fd33ef4e8b75632d8a4fe4d86944fdfc5a649
beetle/__init__.py
beetle/__init__.py
name = 'beetle'
version = '0.4.1-dev'
project_url = 'https://github.com/cknv/beetle'

class BeetleError(Exception):
    pass
name = 'beetle'
version = '0.4.1-dev'
project_url = 'https://github.com/cknv/beetle'

class BeetleError(Exception):
    def __init__(self, page=None):
        self.page = page
Allow the BeetleError class to take a page object as an argument
Allow the BeetleError class to take a page object as an argument
Python
mit
cknv/beetle
--- +++ @@ -3,4 +3,5 @@
 project_url = 'https://github.com/cknv/beetle'
 
 class BeetleError(Exception):
-    pass
+    def __init__(self, page=None):
+        self.page = page
98ebd229819cb108af7746dfdd950019111063ce
http_server.py
http_server.py
import socket


class HttpServer(object):
    """docstring for HttpServer"""
    def __init__(self, ip=u'127.0.0.1', port=50000, backlog=5):
        self._ip = ip
        self._port = port
        self._backlog = backlog
        self._socket = None

    def open_socket(self):
        self._socket = socket.socket(
            socket.AF_INET,
            socket.SOCK_STREAM,
            socket.IPPROTO_IP)
        self._socket.bind((self._ip, self._port))
        self._socket.listen(self._backlog)
import socket


class HttpServer(object):
    """docstring for HttpServer"""
    def __init__(self, ip=u'127.0.0.1', port=50000, backlog=5):
        self._ip = ip
        self._port = port
        self._backlog = backlog
        self._socket = None

    def open_socket(self):
        self._socket = socket.socket(
            socket.AF_INET,
            socket.SOCK_STREAM,
            socket.IPPROTO_IP)
        self._socket.bind((self._ip, self._port))
        self._socket.listen(self._backlog)

    def close_socket(self):
        self._socket.shutdown(socket.SHUT_WR)
        self._socket.close()
        self._socket = None
Add HttpServer.close_socket() to the server's socket
Add HttpServer.close_socket() to the server's socket
Python
mit
jefrailey/network_tools
--- +++ @@ -16,3 +16,8 @@
             socket.IPPROTO_IP)
         self._socket.bind((self._ip, self._port))
         self._socket.listen(self._backlog)
+
+    def close_socket(self):
+        self._socket.shutdown(socket.SHUT_WR)
+        self._socket.close()
+        self._socket = None
0824bb4025d00d9e435c162a0b1931d448baf7c9
hardware/sense_hat/marble_maze.py
hardware/sense_hat/marble_maze.py
# based on https://www.raspberrypi.org/learning/sense-hat-marble-maze/worksheet/

from sense_hat import SenseHat
import time

sense = SenseHat()
sense.clear()

time.sleep(0.5)

r = (255, 0, 0 )
b = (0,0,0)
w = (255, 255, 255 )

x = 1
y = 1

maze = [[r,r,r,r,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,r,r,b,r,b,b,r],
        [r,b,r,b,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,b,r,r,r,r,b,r],
        [r,b,b,r,b,b,b,r],
        [r,r,r,r,r,r,r,r]]

sense.set_pixels(sum(maze,[]))

game_over = False

while not game_over:
    maze[y][x] = w
    sense.set_pixels(sum(maze,[]))
# based on https://www.raspberrypi.org/learning/sense-hat-marble-maze/worksheet/

from sense_hat import SenseHat
import time

sense = SenseHat()
sense.clear()

time.sleep(0.5)

r = (255, 0, 0 )
b = (0,0,0)
w = (255, 255, 255 )

x = 1
y = 1

maze = [[r,r,r,r,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,r,r,b,r,b,b,r],
        [r,b,r,b,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,b,r,r,r,r,b,r],
        [r,b,b,r,b,b,b,r],
        [r,r,r,r,r,r,r,r]]

def move_marble(pitch, roll, x, y):
    new_x = x
    new_y = y
    if 1 < pitch < 179:
        new_x -= 1
    elif 359 > pitch > 181:
        new_x += 1
    return new_x, new_y

game_over = False

while not game_over:
    pitch = sense.get_orientation()['pitch']
    roll = sense.get_orientation()['roll']
    x,y = move_marble(pitch, roll, x, y)
    maze[y][x] = w
    sense.set_pixels(sum(maze,[]))
Add moving of marble on x axis - currently broken
Add moving of marble on x axis - currently broken
Python
mit
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
--- +++ @@ -24,10 +24,21 @@
         [r,b,b,r,b,b,b,r],
         [r,r,r,r,r,r,r,r]]
 
-sense.set_pixels(sum(maze,[]))
+def move_marble(pitch, roll, x, y):
+    new_x = x
+    new_y = y
+    if 1 < pitch < 179:
+        new_x -= 1
+    elif 359 > pitch > 181:
+        new_x += 1
+    return new_x, new_y
 
 game_over = False
 
 while not game_over:
+    pitch = sense.get_orientation()['pitch']
+    roll = sense.get_orientation()['roll']
+    x,y = move_marble(pitch, roll, x, y)
     maze[y][x] = w
     sense.set_pixels(sum(maze,[]))
+
0d81d93a0c90c8cda2e762255c2d41b99ddc16f3
macdict/cli.py
macdict/cli.py
from __future__ import absolute_import

import sys
import argparse

from macdict.dictionary import lookup_word


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('word')
    return parser.parse_args()


def abort(text):
    sys.stderr.write(u'%s\n' % text)
    sys.exit(1)


def report(text):
    sys.stdout.write(u'%s\n' % text)
    sys.exit(0)


def main():
    args = parse_args()
    definition = lookup_word(args.word)
    if definition is None:
        abort(u'Definition not found for "%s"' % args.word)
    else:
        report(definition)
from __future__ import absolute_import

import sys
import argparse

from macdict.dictionary import lookup_word


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('word')
    return parser.parse_args()


def abort(text):
    sys.stderr.write(u'%s\n' % text)
    sys.exit(1)


def report(text):
    sys.stdout.write(u'%s\n' % text)
    sys.exit(0)


def main():
    args = parse_args()
    definition = lookup_word(args.word.decode('utf-8'))
    if definition is None:
        abort(u'Definition not found for "%s"' % args.word)
    else:
        report(definition)
Fix CJK input in command line arguments
Fix CJK input in command line arguments
Python
mit
tonyseek/macdict
--- +++ @@ -24,7 +24,7 @@
 
 def main():
     args = parse_args()
-    definition = lookup_word(args.word)
+    definition = lookup_word(args.word.decode('utf-8'))
     if definition is None:
         abort(u'Definition not found for "%s"' % args.word)
     else: