column         type            min        max
commit         stringlengths   40         40
old_file       stringlengths   4          150
new_file       stringlengths   4          150
old_contents   stringlengths   0          3.26k
new_contents   stringlengths   1          4.43k
subject        stringlengths   15         501
message        stringlengths   15         4.06k
lang           stringclasses   4 values
license        stringclasses   13 values
repos          stringlengths   5          91.5k
diff           stringlengths   0          4.35k
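Each record below pairs the contents of one file before and after a commit with the commit's subject, full message, language, license, the repositories that carry the file, and a unified diff. As a rough illustration of how a record with this schema might be consumed, the sketch below prints a few fields of one row; the `records` list and the way it is loaded are placeholders for this example only, while the field names and sample values are taken from the first record in the dump.

# Minimal sketch of iterating records that follow the schema above.
# `records` is a stand-in for however the dump is actually loaded
# (e.g. parsed from JSON lines); only the field names come from the schema,
# and the sample values are copied from the first record below.
records = [
    {
        "commit": "17c9256000f78fd8fc52f86729dcbb39cb80b3a3",
        "old_file": "src/mcedit2/widgets/nbttree/nbttreeview.py",
        "new_file": "src/mcedit2/widgets/nbttree/nbttreeview.py",
        "subject": "Set NBT tree view with correct proxy model (oops)",
        "lang": "Python",
        "license": "bsd-3-clause",
        "diff": "--- ...",  # unified diff of old_contents -> new_contents
    },
]

for row in records:
    # A short commit id plus language and license make a compact summary line.
    print(row["commit"][:8], row["lang"], row["license"])
    print(row["subject"])
    print(row["diff"])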
17c9256000f78fd8fc52f86729dcbb39cb80b3a3
src/mcedit2/widgets/nbttree/nbttreeview.py
src/mcedit2/widgets/nbttree/nbttreeview.py
""" nbttreewidget """ from __future__ import absolute_import, division, print_function, unicode_literals import logging from PySide import QtGui from PySide.QtCore import Qt from mcedit2.widgets.nbttree.nbttreemodel import NBTFilterProxyModel from mcedit2.util.load_ui import registerCustomWidget from mcedit2.widgets.layout import Row log = logging.getLogger(__name__) @registerCustomWidget class NBTTreeView(QtGui.QWidget): def __init__(self, *args, **kwargs): super(NBTTreeView, self).__init__(*args, **kwargs) self.treeView = QtGui.QTreeView() self.setLayout(Row(self.treeView)) def setModel(self, model): self.model = model proxyModel = NBTFilterProxyModel(self) proxyModel.setSourceModel(model) proxyModel.setDynamicSortFilter(True) self.treeView.setModel(model) self.treeView.sortByColumn(0, Qt.AscendingOrder) self.treeView.expandToDepth(0) self.treeView.resizeColumnToContents(0) self.treeView.resizeColumnToContents(1)
""" nbttreewidget """ from __future__ import absolute_import, division, print_function, unicode_literals import logging from PySide import QtGui from PySide.QtCore import Qt from mcedit2.widgets.nbttree.nbttreemodel import NBTFilterProxyModel from mcedit2.util.load_ui import registerCustomWidget from mcedit2.widgets.layout import Row log = logging.getLogger(__name__) @registerCustomWidget class NBTTreeView(QtGui.QWidget): def __init__(self, *args, **kwargs): super(NBTTreeView, self).__init__(*args, **kwargs) self.treeView = QtGui.QTreeView() self.setLayout(Row(self.treeView)) def setModel(self, model): self.model = model proxyModel = NBTFilterProxyModel(self) proxyModel.setSourceModel(model) proxyModel.setDynamicSortFilter(True) self.treeView.setModel(proxyModel) self.treeView.sortByColumn(0, Qt.AscendingOrder) self.treeView.expandToDepth(0) self.treeView.resizeColumnToContents(0) self.treeView.resizeColumnToContents(1)
Set NBT tree view with correct proxy model (oops)
Set NBT tree view with correct proxy model (oops)
Python
bsd-3-clause
Rubisk/mcedit2,Rubisk/mcedit2,vorburger/mcedit2,vorburger/mcedit2
---
+++
@@ -28,7 +28,7 @@

         proxyModel.setSourceModel(model)
         proxyModel.setDynamicSortFilter(True)
-        self.treeView.setModel(model)
+        self.treeView.setModel(proxyModel)

         self.treeView.sortByColumn(0, Qt.AscendingOrder)
         self.treeView.expandToDepth(0)
91faf4fd5fa3d5878e2792bcd87f81c261ec5033
wagtail/wagtailadmin/edit_bird.py
wagtail/wagtailadmin/edit_bird.py
from django.core.urlresolvers import reverse from django.template import RequestContext from django.template.loader import render_to_string class BaseItem(object): template = 'wagtailadmin/edit_bird/base_item.html' @property def can_render(self): return True def render(self, request): if self.can_render: return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request)) class EditPageItem(BaseItem): template = 'wagtailadmin/edit_bird/edit_page_item.html' def __init__(self, page): self.page = page @property def can_render(self): # Don't render if the page doesn't have an id return self.page.id def render_edit_bird(request, items): # Don't render if the user is not logged in if not request.user.is_authenticated(): return # Render the items rendered_items = [item.render(request) for item in items] # Remove any unrendered items rendered_items = [item for item in rendered_items if item] # Quit if no items rendered if not rendered_items: return # Render the edit bird return render_to_string('wagtailadmin/edit_bird/edit_bird.html', { 'items': [item.render(request) for item in items], })
from django.core.urlresolvers import reverse from django.template import RequestContext from django.template.loader import render_to_string class BaseItem(object): template = 'wagtailadmin/edit_bird/base_item.html' def render(self, request): return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request)) class EditPageItem(BaseItem): template = 'wagtailadmin/edit_bird/edit_page_item.html' def __init__(self, page): self.page = page def render(self, request): # Don't render if the page doesn't have an id if not self.page.id: return return super(EditPageItem, self).render(request) def render_edit_bird(request, items): # Don't render if the user is not logged in if not request.user.is_authenticated(): return # Render the items rendered_items = [item.render(request) for item in items] # Remove any unrendered items rendered_items = [item for item in rendered_items if item] # Quit if no items rendered if not rendered_items: return # Render the edit bird return render_to_string('wagtailadmin/edit_bird/edit_bird.html', { 'items': [item.render(request) for item in items], })
Edit bird: Clean up render method of EditPageItem
Edit bird: Clean up render method of EditPageItem
Python
bsd-3-clause
takeflight/wagtail,FlipperPA/wagtail,stevenewey/wagtail,jorge-marques/wagtail,kurtw/wagtail,Pennebaker/wagtail,janusnic/wagtail,gogobook/wagtail,hamsterbacke23/wagtail,m-sanders/wagtail,mayapurmedia/wagtail,zerolab/wagtail,bjesus/wagtail,Tivix/wagtail,JoshBarr/wagtail,takeshineshiro/wagtail,chimeno/wagtail,benemery/wagtail,benjaoming/wagtail,Tivix/wagtail,davecranwell/wagtail,rsalmaso/wagtail,hanpama/wagtail,bjesus/wagtail,rjsproxy/wagtail,lojack/wagtail,wagtail/wagtail,mjec/wagtail,nealtodd/wagtail,nilnvoid/wagtail,iho/wagtail,lojack/wagtail,Klaudit/wagtail,wagtail/wagtail,jordij/wagtail,Pennebaker/wagtail,rsalmaso/wagtail,gasman/wagtail,Tivix/wagtail,FlipperPA/wagtail,timorieber/wagtail,gasman/wagtail,kurtrwall/wagtail,WQuanfeng/wagtail,nutztherookie/wagtail,Pennebaker/wagtail,thenewguy/wagtail,wagtail/wagtail,takeflight/wagtail,serzans/wagtail,nealtodd/wagtail,janusnic/wagtail,zerolab/wagtail,taedori81/wagtail,mjec/wagtail,takeshineshiro/wagtail,m-sanders/wagtail,gogobook/wagtail,tangentlabs/wagtail,kaedroho/wagtail,dresiu/wagtail,mixxorz/wagtail,chimeno/wagtail,nimasmi/wagtail,janusnic/wagtail,iansprice/wagtail,rsalmaso/wagtail,stevenewey/wagtail,timorieber/wagtail,darith27/wagtail,davecranwell/wagtail,rsalmaso/wagtail,quru/wagtail,iansprice/wagtail,benjaoming/wagtail,takeshineshiro/wagtail,kurtrwall/wagtail,benemery/wagtail,jorge-marques/wagtail,nrsimha/wagtail,torchbox/wagtail,kurtw/wagtail,mephizzle/wagtail,Toshakins/wagtail,benjaoming/wagtail,marctc/wagtail,thenewguy/wagtail,rv816/wagtail,WQuanfeng/wagtail,kaedroho/wagtail,mjec/wagtail,nilnvoid/wagtail,gasman/wagtail,hanpama/wagtail,darith27/wagtail,marctc/wagtail,serzans/wagtail,darith27/wagtail,chimeno/wagtail,Toshakins/wagtail,m-sanders/wagtail,jnns/wagtail,jnns/wagtail,100Shapes/wagtail,tangentlabs/wagtail,rv816/wagtail,mayapurmedia/wagtail,janusnic/wagtail,mikedingjan/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,iansprice/wagtail,kaedroho/wagtail,nimasmi/wagtail,JoshBarr/wagtail,nilnvoid/wagtail,rjsproxy/wagtail,JoshBarr/wagtail,gogobook/wagtail,taedori81/wagtail,takeflight/wagtail,jordij/wagtail,torchbox/wagtail,mikedingjan/wagtail,hanpama/wagtail,torchbox/wagtail,FlipperPA/wagtail,KimGlazebrook/wagtail-experiment,iho/wagtail,mixxorz/wagtail,chimeno/wagtail,mixxorz/wagtail,dresiu/wagtail,zerolab/wagtail,chimeno/wagtail,kurtw/wagtail,JoshBarr/wagtail,dresiu/wagtail,wagtail/wagtail,helenwarren/pied-wagtail,tangentlabs/wagtail,mephizzle/wagtail,hanpama/wagtail,davecranwell/wagtail,WQuanfeng/wagtail,hamsterbacke23/wagtail,benemery/wagtail,tangentlabs/wagtail,iansprice/wagtail,bjesus/wagtail,inonit/wagtail,inonit/wagtail,nealtodd/wagtail,takeflight/wagtail,Toshakins/wagtail,kurtrwall/wagtail,dresiu/wagtail,nilnvoid/wagtail,nutztherookie/wagtail,100Shapes/wagtail,jnns/wagtail,quru/wagtail,jordij/wagtail,willcodefortea/wagtail,rjsproxy/wagtail,benjaoming/wagtail,Toshakins/wagtail,thenewguy/wagtail,chrxr/wagtail,takeshineshiro/wagtail,nimasmi/wagtail,nutztherookie/wagtail,kaedroho/wagtail,mikedingjan/wagtail,willcodefortea/wagtail,jorge-marques/wagtail,taedori81/wagtail,mephizzle/wagtail,chrxr/wagtail,KimGlazebrook/wagtail-experiment,chrxr/wagtail,lojack/wagtail,willcodefortea/wagtail,iho/wagtail,timorieber/wagtail,thenewguy/wagtail,kurtw/wagtail,nrsimha/wagtail,zerolab/wagtail,torchbox/wagtail,hamsterbacke23/wagtail,serzans/wagtail,mixxorz/wagtail,quru/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,davecranwell/wagtail,Klaudit/wagtail,jorge-marques/wagtail,rsalmaso/wagtail,KimGlazebrook/wagtail-experiment,Klaudit/wagtail,wagtail/wagtai
l,taedori81/wagtail,stevenewey/wagtail,chrxr/wagtail,nrsimha/wagtail,timorieber/wagtail,FlipperPA/wagtail,iho/wagtail,Klaudit/wagtail,taedori81/wagtail,mikedingjan/wagtail,mjec/wagtail,rv816/wagtail,rjsproxy/wagtail,nrsimha/wagtail,quru/wagtail,gasman/wagtail,willcodefortea/wagtail,jordij/wagtail,gogobook/wagtail,zerolab/wagtail,darith27/wagtail,inonit/wagtail,mephizzle/wagtail,nealtodd/wagtail,helenwarren/pied-wagtail,mayapurmedia/wagtail,Tivix/wagtail,100Shapes/wagtail,gasman/wagtail,serzans/wagtail,marctc/wagtail,m-sanders/wagtail,mixxorz/wagtail,kurtrwall/wagtail,KimGlazebrook/wagtail-experiment,rv816/wagtail,thenewguy/wagtail,stevenewey/wagtail,inonit/wagtail,jorge-marques/wagtail,mayapurmedia/wagtail,marctc/wagtail,dresiu/wagtail,benemery/wagtail,nutztherookie/wagtail,Pennebaker/wagtail,bjesus/wagtail,helenwarren/pied-wagtail,jnns/wagtail
---
+++
@@ -6,13 +6,8 @@
 class BaseItem(object):
     template = 'wagtailadmin/edit_bird/base_item.html'

-    @property
-    def can_render(self):
-        return True
-
     def render(self, request):
-        if self.can_render:
-            return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
+        return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))


 class EditPageItem(BaseItem):
@@ -21,10 +16,12 @@
     def __init__(self, page):
         self.page = page

-    @property
-    def can_render(self):
+    def render(self, request):
         # Don't render if the page doesn't have an id
-        return self.page.id
+        if not self.page.id:
+            return
+
+        return super(EditPageItem, self).render(request)


 def render_edit_bird(request, items):
079e347bf7f05e01824a6c05495b42015e672423
keepsimplecms/jinja/globals.py
keepsimplecms/jinja/globals.py
# -*- coding: utf-8 -*- from jinja2.utils import Markup from pprint import pformat import sys import types def global_node(node_value): """ Used for the inclusion of a node in a template. It just marks the node value `node_value` as safe. """ return Markup(node_value) def global_dump(value): """ Dump `value` for debugging. """ return Markup('<pre>' + pformat(value, 1, 2, 4) + '</pre>') # save references to the defined functions functions = {} current_module = sys.modules[__name__] for x in dir(current_module): x = current_module.__dict__.get(x) if isinstance(x, types.FunctionType) \ and x.__name__.startswith('global_'): fn = x.__name__[len('global_'):] functions[fn] = x
# -*- coding: utf-8 -*- from jinja2 import Environment from jinja2.utils import Markup from pprint import pformat import sys import types env = Environment() def global_node(node_value, indent=0, indent_first=False): """ Used for the inclusion of a node in a template by indenting and flagging the HTML string as safe. """ spaces = indent * 4 tmpl = env.from_string(('{{ node_value | indent(%d, %s) }}' % (spaces, indent_first))) return Markup(tmpl.render(node_value=node_value)) def global_dump(value): """ Dump `value` for debugging. """ return Markup('<pre>' + pformat(value, 1, 2, 4) + '</pre>') # save references to the defined functions functions = {} current_module = sys.modules[__name__] for x in dir(current_module): x = current_module.__dict__.get(x) if isinstance(x, types.FunctionType) \ and x.__name__.startswith('global_'): fn = x.__name__[len('global_'):] functions[fn] = x
Add some indent options in the node Jinja macro.
Add some indent options in the node Jinja macro.
Python
bsd-3-clause
cr0cK/keepsimple.cms,cr0cK/keepsimple.cms,cr0cK/keepsimple.cms
---
+++
@@ -1,17 +1,23 @@
 # -*- coding: utf-8 -*-

+from jinja2 import Environment
 from jinja2.utils import Markup
 from pprint import pformat
 import sys
 import types


-def global_node(node_value):
+env = Environment()
+
+
+def global_node(node_value, indent=0, indent_first=False):
     """
-    Used for the inclusion of a node in a template.
-    It just marks the node value `node_value` as safe.
+    Used for the inclusion of a node in a template by indenting and flagging
+    the HTML string as safe.
    """
-    return Markup(node_value)
+    spaces = indent * 4
+    tmpl = env.from_string(('{{ node_value | indent(%d, %s) }}' % (spaces, indent_first)))
+    return Markup(tmpl.render(node_value=node_value))

 def global_dump(value):
     """
fdfb2b1da5e7cea83bd4189bb9d998273c03a7cd
SlugifyCommand.py
SlugifyCommand.py
# encoding: utf-8 '''This adds a "slugify" command to be invoked by Sublime Text. It is made available as "Slugify" in the command palette by Default.sublime-commands. Parts of these commands are borrowed from the sublime-slug package: https://github.com/madeingnecca/sublime-slug ''' from __future__ import unicode_literals import sublime import sublime_plugin try: # This import method works in Sublime Text 2. import slugify except ImportError: # While this works in Sublime Text 3. from .slugify import slugify class SlugifyCommand(sublime_plugin.TextCommand): separator = '-' def run(self, edit): def done(value): self.separator = value self.view.run_command('slugify_replace', {'separator': self.separator}) window = self.view.window() window.show_input_panel('Separator', self.separator, done, None, None) class SlugifyReplaceCommand(sublime_plugin.TextCommand): def run(self, edit, separator): regions = self.view.sel() # Only run if there is a selection. if len(regions) > 1 or not regions[0].empty(): for region in regions: text = self.view.substr(region) self.view.replace(edit, region, slugify(text, separator))
# encoding: utf-8 '''This adds a "slugify" command to be invoked by Sublime Text. It is made available as "Slugify" in the command palette by Default.sublime-commands. Parts of these commands are borrowed from the sublime-slug package: https://github.com/madeingnecca/sublime-slug ''' from __future__ import unicode_literals import sublime import sublime_plugin try: # This import method works in Sublime Text 2. from slugify import slugify except ImportError: # While this works in Sublime Text 3. from .slugify import slugify class SlugifyCommand(sublime_plugin.TextCommand): separator = '-' def run(self, edit): def done(value): self.separator = value self.view.run_command('slugify_replace', {'separator': self.separator}) window = self.view.window() window.show_input_panel('Separator', self.separator, done, None, None) class SlugifyReplaceCommand(sublime_plugin.TextCommand): def run(self, edit, separator): regions = self.view.sel() # Only run if there is a selection. if len(regions) > 1 or not regions[0].empty(): for region in regions: text = self.view.substr(region) self.view.replace(edit, region, slugify(text, separator))
Fix broken import in Sublime Text 2.
Fix broken import in Sublime Text 2.
Python
mit
alimony/sublime-slugify
---
+++
@@ -12,7 +12,7 @@
 import sublime_plugin
 try:
     # This import method works in Sublime Text 2.
-    import slugify
+    from slugify import slugify
 except ImportError:
     # While this works in Sublime Text 3.
     from .slugify import slugify
1d486d8035e918a83dce5a70c83149a06d982a9f
Instanssi/admin_calendar/models.py
Instanssi/admin_calendar/models.py
# -*- coding: utf-8 -*- from django.db import models from django.contrib import admin from django.contrib.auth.models import User from imagekit.models import ImageSpec from imagekit.processors import resize class CalendarEvent(models.Model): user = models.ForeignKey(User, verbose_name=u'Käyttäjä') start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.') end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True) description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True) title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32) image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True) image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG') EVENT_TYPES = ( (0, u'Aikaraja'), (1, u'Aikavaraus'), ) type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0) try: admin.site.register(CalendarEvent) except: pass
# -*- coding: utf-8 -*- from django.db import models from django.contrib import admin from django.contrib.auth.models import User from imagekit.models import ImageSpecField from imagekit.processors import ResizeToFill class CalendarEvent(models.Model): user = models.ForeignKey(User, verbose_name=u'Käyttäjä') start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.') end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True) description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True) title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32) image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True) image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG') EVENT_TYPES = ( (0, u'Aikaraja'), (1, u'Aikavaraus'), ) type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0) try: admin.site.register(CalendarEvent) except: pass
Fix to work on the latest django-imagekit
admin_calendar: Fix to work on the latest django-imagekit
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
---
+++
@@ -3,8 +3,8 @@
 from django.db import models
 from django.contrib import admin
 from django.contrib.auth.models import User
-from imagekit.models import ImageSpec
-from imagekit.processors import resize
+from imagekit.models import ImageSpecField
+from imagekit.processors import ResizeToFill

 class CalendarEvent(models.Model):
     user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
@@ -13,7 +13,7 @@
     description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
     title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
     image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
-    image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG')
+    image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG')
     EVENT_TYPES = (
         (0, u'Aikaraja'),
         (1, u'Aikavaraus'),
bd79e3741e03a25b17670f6529e4d98bb97fa3ae
scikits/image/__init__.py
scikits/image/__init__.py
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.join(_osp.dirname(__file__), 'data') from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print "Could not load nose. Unit tests not available." return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.join(_osp.dirname(__file__), 'data') from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print "Could not load nose. Unit tests not available." return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) return logging.getLogger(name)
Add easy way to grab a logger.
Add easy way to grab a logger.
Python
bsd-3-clause
dpshelio/scikit-image,emon10005/scikit-image,chriscrosscutler/scikit-image,jwiggins/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,ClinicalGraphics/scikit-image,paalge/scikit-image,blink1073/scikit-image,emmanuelle/scikits.image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,pratapvardhan/scikit-image,michaelaye/scikit-image,ofgulban/scikit-image,michaelpacer/scikit-image,vighneshbirodkar/scikit-image,ajaybhat/scikit-image,bsipocz/scikit-image,newville/scikit-image,SamHames/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,michaelpacer/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,emon10005/scikit-image,bsipocz/scikit-image,keflavich/scikit-image,SamHames/scikit-image,robintw/scikit-image,Hiyorimi/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,rjeli/scikit-image,Midafi/scikit-image,chintak/scikit-image,paalge/scikit-image,dpshelio/scikit-image,robintw/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,emmanuelle/scikits.image,youprofit/scikit-image,emmanuelle/scikits.image,Midafi/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,keflavich/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,Britefury/scikit-image,GaelVaroquaux/scikits.image,chintak/scikit-image,emmanuelle/scikits.image,youprofit/scikit-image,GaelVaroquaux/scikits.image,juliusbierk/scikit-image,WarrenWeckesser/scikits-image,rjeli/scikit-image,bennlich/scikit-image
---
+++
@@ -25,3 +25,7 @@
 if test is None:
     del test

+def get_log(name):
+    import logging, sys
+    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+    return logging.getLogger(name)
005ff4e6584727ba6ca61b49a57621dd7f17cd6a
examples/client.py
examples/client.py
from socketio_client.manager import Manager import gevent from gevent import monkey monkey.patch_socket() import logging logging.basicConfig(level=logging.DEBUG) io = Manager('localhost', 8000, auto_connect=False) chat = io.socket('/chat') @chat.on('welcome') def on_hello(*args, **kwargs): print args kwargs['callback']("thanks!") @chat.on_connect() def on_connect(): chat.emit("hello", "blablabla") chat.connect() gevent.wait()
from socketio_client.manager import Manager import gevent from gevent import monkey monkey.patch_socket() import logging logging.basicConfig(level=logging.DEBUG) io = Manager('http', 'localhost', 8000, auto_connect=False) chat = io.socket('/chat') @chat.on('welcome') def on_hello(*args, **kwargs): print args kwargs['callback']("thanks!") @chat.on_connect() def on_connect(): chat.emit("hello", "blablabla") chat.connect() gevent.wait()
Update example according to modified Manager constructor.
Update example according to modified Manager constructor. A scheme parameter is now required on Manager instantiation
Python
mit
veo-labs/python-socketio-client
---
+++
@@ -7,7 +7,7 @@
 import logging
 logging.basicConfig(level=logging.DEBUG)

-io = Manager('localhost', 8000, auto_connect=False)
+io = Manager('http', 'localhost', 8000, auto_connect=False)
 chat = io.socket('/chat')

 @chat.on('welcome')
ddcf3490990f9b78f0009937bc9ddc2df0331b8e
lib/booki/account/templatetags/profile.py
lib/booki/account/templatetags/profile.py
import os from django.db.models import get_model from django.template import Library, Node, TemplateSyntaxError, resolve_variable from django.conf import settings from booki.account.models import UserProfile register = Library() class ProfileImageNode(Node): def __init__(self, user): self.user = user def render(self, context): user = resolve_variable(self.user, context) # should check if it exists and etc profile = UserProfile.objects.get(user=user) if not profile.image: return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL filename = profile.image.name return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1]) @register.tag def profile_image(parser, token): """ Django tag. Shows user profile image. If user does not have defined image it will show anonymous image. @type token: C{string} @param token: Variable name that points to C{User} object. """ bits = token.contents.split() if len(bits) != 2: raise TemplateSyntaxError return ProfileImageNode(bits[1])
import os from django.db.models import get_model from django.template import Library, Node, TemplateSyntaxError, resolve_variable from django.conf import settings from booki.account.models import UserProfile register = Library() class ProfileImageNode(Node): def __init__(self, user): self.user = user def render(self, context): user = resolve_variable(self.user, context) # should check if it exists and etc profile = UserProfile.objects.get(user=user) if not profile.image: return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL filename = profile.image.name return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1]) @register.tag def profile_image(parser, token): """ Django tag. Shows user profile image. If user does not have defined image it will show anonymous image. @type token: C{string} @param token: Variable name that points to C{User} object. """ bits = token.contents.split() if len(bits) != 2: raise TemplateSyntaxError return ProfileImageNode(bits[1])
Fix image link to anonymous user.
Fix image link to anonymous user.
Python
agpl-3.0
kronoscode/Booktype,MiczFlor/Booktype,rob-hills/Booktype,ride90/Booktype,okffi/booktype,kronoscode/Booktype,danielhjames/Booktype,danielhjames/Booktype,eos87/Booktype,ride90/Booktype,MiczFlor/Booktype,danielhjames/Booktype,kronoscode/Booktype,btat/Booktype,ride90/Booktype,aerkalov/Booktype,danielhjames/Booktype,btat/Booktype,ride90/Booktype,sourcefabric/Booktype,okffi/booktype,danielhjames/Booktype,rob-hills/Booktype,okffi/booktype,sourcefabric/Booktype,kronoscode/Booktype,btat/Booktype,ride90/Booktype,olegpshenichniy/Booktype,kronoscode/Booktype,olegpshenichniy/Booktype,eos87/Booktype,sourcefabric/Booktype,MiczFlor/Booktype,btat/Booktype,okffi/booktype,MiczFlor/Booktype,MiczFlor/Booktype,rob-hills/Booktype,olegpshenichniy/Booktype,olegpshenichniy/Booktype,sourcefabric/Booktype,eos87/Booktype,okffi/booktype,aerkalov/Booktype,rob-hills/Booktype,eos87/Booktype,olegpshenichniy/Booktype,aerkalov/Booktype,eos87/Booktype,sourcefabric/Booktype
---
+++
@@ -19,7 +19,7 @@
         profile = UserProfile.objects.get(user=user)

         if not profile.image:
-            return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL
+            return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL


         filename = profile.image.name
7fb284ad29098a4397c7ac953e2d9acb89cf089e
notification/backends/email.py
notification/backends/email.py
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): sensitivity = 2 slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed.
Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed.
Python
mit
theatlantic/django-notification,theatlantic/django-notification
---
+++
@@ -5,6 +5,7 @@


 class EmailBackend(NotificationBackend):
+    sensitivity = 2
     slug = u'email'
     display_name = u'E-mail'
     formats = ['short.txt', 'full.txt']
b28ceb8631446b57abb48be6c76db843ec747221
demo/set-sas-token.py
demo/set-sas-token.py
#!/usr/bin/env python from __future__ import print_function import os from subprocess import check_output from sys import argv, stdout RSGRP = "travistestresourcegroup" STACC = "travistestresourcegr3014" def run(command): command = command.replace('az ', '', 1) cmd = 'python -m azure.cli {}'.format(command) print(cmd) out = check_output(cmd) return out.decode('utf-8') # get storage account connection string out = run('az storage account connection-string -g {} -n {}'.format(RSGRP, STACC)) connection_string = out.replace('Connection String : ', '') os.environ['AZURE_STORAGE_CONNECTION_STRING'] = connection_string sas_token = run('az storage account generate-sas --services b --resource-types sco --permission rwdl --expiry 2017-01-01T00:00Z' .format(connection_string)).strip() os.environ.pop('AZURE_STORAGE_CONNECTION_STRING', None) os.environ['AZURE_STORAGE_ACCOUNT'] = STACC os.environ['AZURE_SAS_TOKEN'] = sas_token print('\n=== Listing storage containers...===') print(run('az storage container list')) print('\n=== Trying to list storage shares *SHOULD FAIL*... ===') print('az storage container list --sas-token \"{}\"'.format(sas_token)) print(run('az storage share list')) exit(0)
#!/usr/bin/env python from __future__ import print_function import os from subprocess import check_output from sys import argv, stdout RSGRP = "travistestresourcegroup" STACC = "travistestresourcegr3014" def cmd(command): """ Accepts a command line command as a string and returns stdout in UTF-8 format """ return check_output([str(x) for x in command.split()]).decode('utf-8') # get storage account connection string out = cmd('az storage account connection-string -g {} -n {}'.format(RSGRP, STACC)) connection_string = out.replace('Connection String : ', '') os.environ['AZURE_STORAGE_CONNECTION_STRING'] = connection_string sas_token = cmd('az storage account generate-sas --services b --resource-types sco --permission rwdl --expiry 2017-01-01T00:00Z' .format(connection_string)).strip() os.environ.pop('AZURE_STORAGE_CONNECTION_STRING', None) os.environ['AZURE_STORAGE_ACCOUNT'] = STACC os.environ['AZURE_SAS_TOKEN'] = sas_token print('\n=== Listing storage containers...===') print(cmd('az storage container list')) print('\n=== Trying to list storage shares *SHOULD FAIL*... ===') print('az storage container list --sas-token \"{}\"'.format(sas_token)) print(cmd('az storage share list')) exit(0)
Update python scripts to run on OSX.
Update python scripts to run on OSX.
Python
mit
QingChenmsft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli
---
+++
@@ -9,29 +9,26 @@
 RSGRP = "travistestresourcegroup"
 STACC = "travistestresourcegr3014"

-def run(command):
-    command = command.replace('az ', '', 1)
-    cmd = 'python -m azure.cli {}'.format(command)
-    print(cmd)
-    out = check_output(cmd)
-    return out.decode('utf-8')
+def cmd(command):
+    """ Accepts a command line command as a string and returns stdout in UTF-8 format """
+    return check_output([str(x) for x in command.split()]).decode('utf-8')

 # get storage account connection string
-out = run('az storage account connection-string -g {} -n {}'.format(RSGRP, STACC))
+out = cmd('az storage account connection-string -g {} -n {}'.format(RSGRP, STACC))
 connection_string = out.replace('Connection String : ', '')
 os.environ['AZURE_STORAGE_CONNECTION_STRING'] = connection_string

-sas_token = run('az storage account generate-sas --services b --resource-types sco --permission rwdl --expiry 2017-01-01T00:00Z'
+sas_token = cmd('az storage account generate-sas --services b --resource-types sco --permission rwdl --expiry 2017-01-01T00:00Z'
     .format(connection_string)).strip()
 os.environ.pop('AZURE_STORAGE_CONNECTION_STRING', None)
 os.environ['AZURE_STORAGE_ACCOUNT'] = STACC
 os.environ['AZURE_SAS_TOKEN'] = sas_token

 print('\n=== Listing storage containers...===')
-print(run('az storage container list'))
+print(cmd('az storage container list'))

 print('\n=== Trying to list storage shares *SHOULD FAIL*... ===')
 print('az storage container list --sas-token \"{}\"'.format(sas_token))
-print(run('az storage share list'))
+print(cmd('az storage share list'))

 exit(0)
5d65b35623d2dbdb518a6e4a7f95ec224bf879a1
ros_start/scritps/service_client.py
ros_start/scritps/service_client.py
#!/usr/bin/env python import rospy from std_srvs.srv import Empty def service_client(): rospy.loginfo('waiting service') rospy.wait_for_service('call_me') try: service = rospy.ServiceProxy('call_me', Empty) response = service() except rospy.ServiceException, e: print "Service call failed: %s" % e if __name__ == "__main__": service_client()
#!/usr/bin/env python import rospy from std_srvs.srv import Empty def call_service(): rospy.loginfo('waiting service') rospy.wait_for_service('call_me') try: service = rospy.ServiceProxy('call_me', Empty) response = service() except rospy.ServiceException, e: print "Service call failed: %s" % e def service_client(): rospy.init_node('service_client') call_service() rospy.spin() if __name__ == "__main__": service_client()
Add initialization of the service client node.
Add initialization of the service client node.
Python
bsd-2-clause
OTL/ros_book_programs,OTL/ros_book_programs
---
+++
@@ -3,14 +3,20 @@
 import rospy
 from std_srvs.srv import Empty

-def service_client():
+def call_service():
     rospy.loginfo('waiting service')
     rospy.wait_for_service('call_me')
+
     try:
         service = rospy.ServiceProxy('call_me', Empty)
         response = service()
     except rospy.ServiceException, e:
         print "Service call failed: %s" % e

+def service_client():
+    rospy.init_node('service_client')
+    call_service()
+    rospy.spin()
+
 if __name__ == "__main__":
     service_client()
3f7a9d900a1f2cd2f5522735815c999040a920e0
pajbot/web/routes/api/users.py
pajbot/web/routes/api/users.py
from flask_restful import Resource from pajbot.managers.redis import RedisManager from pajbot.managers.user import UserManager from pajbot.streamhelper import StreamHelper class APIUser(Resource): @staticmethod def get(username): user = UserManager.find_static(username) if not user: return {"error": "Not found"}, 404 redis = RedisManager.get() key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer()) rank = redis.zrevrank(key, user.username) if rank is None: rank = redis.zcard(key) else: rank = rank + 1 return user.jsonify() def init(api): api.add_resource(APIUser, "/users/<username>")
from flask_restful import Resource from pajbot.managers.redis import RedisManager from pajbot.managers.user import UserManager from pajbot.streamhelper import StreamHelper class APIUser(Resource): @staticmethod def get(username): user = UserManager.find_static(username) if not user: return {"error": "Not found"}, 404 return user.jsonify() def init(api): api.add_resource(APIUser, "/users/<username>")
Remove dead code in get user API endpoint
Remove dead code in get user API endpoint
Python
mit
pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot
---
+++
@@ -12,14 +12,6 @@
         if not user:
             return {"error": "Not found"}, 404

-        redis = RedisManager.get()
-        key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
-        rank = redis.zrevrank(key, user.username)
-        if rank is None:
-            rank = redis.zcard(key)
-        else:
-            rank = rank + 1
-
         return user.jsonify()
ddfdddad65a96198d6949c138ad9980188250b92
alembic/versions/35597d56e8d_add_ckan_boolean_to_mod_table.py
alembic/versions/35597d56e8d_add_ckan_boolean_to_mod_table.py
"""Add ckan boolean to mod table Revision ID: 35597d56e8d Revises: 18af22fa9e4 Create Date: 2014-12-12 20:11:22.250080 """ # revision identifiers, used by Alembic. revision = '35597d56e8d' down_revision = '18af22fa9e4' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('mod', sa.Column('ckan', sa.Boolean(), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('mod', 'ckan') ### end Alembic commands ###
"""Add ckan boolean to mod table Revision ID: 35597d56e8d Revises: 18af22fa9e4 Create Date: 2014-12-12 20:11:22.250080 """ # revision identifiers, used by Alembic. revision = '35597d56e8d' down_revision = '50b5a95300c' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('mod', sa.Column('ckan', sa.Boolean(), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('mod', 'ckan') ### end Alembic commands ###
Fix loop in alembic history
Fix loop in alembic history
Python
mit
Kerbas-ad-astra/KerbalStuff,Kerbas-ad-astra/KerbalStuff,toadicus/KerbalStuff,KerbalStuff/KerbalStuff,EIREXE/SpaceDock,ModulousSmash/Modulous,KerbalStuff/KerbalStuff,ModulousSmash/Modulous,ModulousSmash/Modulous,toadicus/KerbalStuff,ModulousSmash/Modulous,EIREXE/SpaceDock,EIREXE/SpaceDock,toadicus/KerbalStuff,Kerbas-ad-astra/KerbalStuff,EIREXE/SpaceDock,KerbalStuff/KerbalStuff
---
+++
@@ -8,7 +8,7 @@

 # revision identifiers, used by Alembic.
 revision = '35597d56e8d'
-down_revision = '18af22fa9e4'
+down_revision = '50b5a95300c'

 from alembic import op
 import sqlalchemy as sa
89112aa8e5ffda6763db2f49a2d32cff5f6b15fd
lib/storage/gcs.py
lib/storage/gcs.py
import gevent.monkey gevent.monkey.patch_all() import logging import boto.gs.connection import boto.gs.key import cache from boto_base import BotoStorage logger = logging.getLogger(__name__) class GSStorage(BotoStorage): def __init__(self, config): BotoStorage.__init__(self, config) def makeConnection(self): return boto.gs.connection.GSConnection( self._config.gs_access_key, self._config.gs_secret_key, is_secure=(self._config.gs_secure is True)) def makeKey(self, path): return boto.gs.key.Key(self._boto_bucket, path) @cache.put def put_content(self, path, content): path = self._init_path(path) key = self.makeKey(path) key.set_contents_from_string(content) return path def stream_write(self, path, fp): # Minimum size of upload part size on GS is 5MB buffer_size = 5 * 1024 * 1024 if self.buffer_size > buffer_size: buffer_size = self.buffer_size path = self._init_path(path) key = boto.gs.key.Key(self._boto_bucket) key.key = path key.set_contents_from_string(fp.read())
import gevent.monkey gevent.monkey.patch_all() import logging import boto.gs.connection import boto.gs.key import cache from boto_base import BotoStorage logger = logging.getLogger(__name__) class GSStorage(BotoStorage): def __init__(self, config): BotoStorage.__init__(self, config) def makeConnection(self): return boto.gs.connection.GSConnection( self._config.gs_access_key, self._config.gs_secret_key, is_secure=(self._config.gs_secure is True)) def makeKey(self, path): return boto.gs.key.Key(self._boto_bucket, path) @cache.put def put_content(self, path, content): path = self._init_path(path) key = self.makeKey(path) key.set_contents_from_string(content) return path def stream_write(self, path, fp): # Minimum size of upload part size on GS is 5MB buffer_size = 5 * 1024 * 1024 if self.buffer_size > buffer_size: buffer_size = self.buffer_size path = self._init_path(path) key = self.makeKey(path) key.set_contents_from_string(fp.read())
Fix up some key construction.
Fix up some key construction.
Python
apache-2.0
depay/docker-registry,alephcloud/docker-registry,HubSpot/docker-registry,dhiltgen/docker-registry,catalyst-zero/docker-registry,ewindisch/docker-registry,hex108/docker-registry,scrapinghub/docker-registry,ewindisch/docker-registry,shipyard/docker-registry,mdshuai/docker-registry,OnePaaS/docker-registry,nunogt/docker-registry,shakamunyi/docker-registry,viljaste/docker-registry-1,liggitt/docker-registry,hex108/docker-registry,csrwng/docker-registry,tangkun75/docker-registry,mboersma/docker-registry,pombredanne/docker-registry,shakamunyi/docker-registry,hpcloud/docker-registry,dhiltgen/docker-registry,scrapinghub/docker-registry,Carrotzpc/docker-registry,ptisserand/docker-registry,dine1987/Docker,yuriyf/docker-registry,deis/docker-registry,nunogt/docker-registry,alephcloud/docker-registry,whuwxl/docker-registry,ActiveState/docker-registry,yuriyf/docker-registry,Haitianisgood/docker-registry,dhiltgen/docker-registry,HubSpot/docker-registry,stormltf/docker-registry,ptisserand/docker-registry,ewindisch/docker-registry,Carrotzpc/docker-registry,docker/docker-registry,hex108/docker-registry,atyenoria/docker-registry,pombredanne/docker-registry,scrapinghub/docker-registry,GoogleCloudPlatform/docker-registry-driver-gcs,depay/docker-registry,wakermahmud/docker-registry,deis/docker-registry,OnePaaS/docker-registry,dalvikchen/docker-registry,fabianofranz/docker-registry,stormltf/docker-registry,whuwxl/docker-registry,Haitianisgood/docker-registry,cnh/docker-registry,fabianofranz/docker-registry,kireal/docker-registry,dedalusdev/docker-registry,liggitt/docker-registry,stormltf/docker-registry,ken-saka/docker-registry,dedalusdev/docker-registry,viljaste/docker-registry-1,ActiveState/docker-registry,shipyard/docker-registry,ptisserand/docker-registry,catalyst-zero/docker-registry,catalyst-zero/docker-registry,whuwxl/docker-registry,kireal/docker-registry,kireal/docker-registry,alephcloud/docker-registry,andrew-plunk/docker-registry,ken-saka/docker-registry,shipyard/docker-registry,dine1987/Docker,Haitianisgood/docker-registry,andrew-plunk/docker-registry,fabianofranz/docker-registry,mboersma/docker-registry,dine1987/Docker,docker/docker-registry,atyenoria/docker-registry,csrwng/docker-registry,hpcloud/docker-registry,dedalusdev/docker-registry,mdshuai/docker-registry,tangkun75/docker-registry,hpcloud/docker-registry,mdshuai/docker-registry,yuriyf/docker-registry,pombredanne/docker-registry,docker/docker-registry,wakermahmud/docker-registry,cnh/docker-registry,GoogleCloudPlatform/docker-registry-driver-gcs,mboersma/docker-registry,cnh/docker-registry,viljaste/docker-registry-1,HubSpot/docker-registry,ken-saka/docker-registry,Carrotzpc/docker-registry,dalvikchen/docker-registry,tangkun75/docker-registry,nunogt/docker-registry,liggitt/docker-registry,atyenoria/docker-registry,depay/docker-registry,csrwng/docker-registry,dalvikchen/docker-registry,wakermahmud/docker-registry,shakamunyi/docker-registry,deis/docker-registry,andrew-plunk/docker-registry,ActiveState/docker-registry,OnePaaS/docker-registry
---
+++
@@ -42,6 +42,5 @@
         if self.buffer_size > buffer_size:
             buffer_size = self.buffer_size
         path = self._init_path(path)
-        key = boto.gs.key.Key(self._boto_bucket)
-        key.key = path
+        key = self.makeKey(path)
         key.set_contents_from_string(fp.read())
e9a11dac0d125d90d5e0b1783b215b9007334d02
contentstore/management/commands/tests/test_sync_schedules.py
contentstore/management/commands/tests/test_sync_schedules.py
from io import BytesIO from django.core.management import call_command from django.test import TestCase from mock import patch from contentstore.models import Schedule from seed_stage_based_messaging import test_utils as utils class SyncSchedulesTests(TestCase): @patch('contentstore.management.commands.sync_schedules.sync_schedule') def test_schedule_sync_called(self, sync_task): """ The sync schedules management command should call the sync schedule task for every schedule. """ utils.disable_signals() schedule = Schedule.objects.create() utils.enable_signals() out = BytesIO() call_command('sync_schedules', stdout=out) sync_task.assert_called_once_with(str(schedule.id)) self.assertIn(str(schedule.id), out.getvalue()) self.assertIn('Synchronised 1 schedule/s', out.getvalue())
from six import BytesIO from django.core.management import call_command from django.test import TestCase from mock import patch from contentstore.models import Schedule from seed_stage_based_messaging import test_utils as utils class SyncSchedulesTests(TestCase): @patch('contentstore.management.commands.sync_schedules.sync_schedule') def test_schedule_sync_called(self, sync_task): """ The sync schedules management command should call the sync schedule task for every schedule. """ utils.disable_signals() schedule = Schedule.objects.create() utils.enable_signals() out = BytesIO() call_command('sync_schedules', stdout=out) sync_task.assert_called_once_with(str(schedule.id)) self.assertIn(str(schedule.id), out.getvalue()) self.assertIn('Synchronised 1 schedule/s', out.getvalue())
Use six for python 2/3 bytes
Use six for python 2/3 bytes
Python
bsd-3-clause
praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-staged-based-messaging
---
+++
@@ -1,4 +1,4 @@
-from io import BytesIO
+from six import BytesIO
 from django.core.management import call_command
 from django.test import TestCase
 from mock import patch
293b1d492cfd3c5542c78acffbbdebf4933b6d85
django_db_geventpool/backends/postgresql_psycopg2/creation.py
django_db_geventpool/backends/postgresql_psycopg2/creation.py
# coding=utf-8 from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation class DatabaseCreation(OriginalDatabaseCreation): def _destroy_test_db(self, test_database_name, verbosity): self.connection.closeall() return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
# coding=utf-8 from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation class DatabaseCreation(OriginalDatabaseCreation): def _destroy_test_db(self, test_database_name, verbosity): self.connection.closeall() return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity) def _create_test_db(self, verbosity, autoclobber): self.connection.closeall() return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
Handle open connections when creating the test database
Handle open connections when creating the test database
Python
apache-2.0
jneight/django-db-geventpool,PreppyLLC-opensource/django-db-geventpool
---
+++
@@ -7,4 +7,6 @@
     def _destroy_test_db(self, test_database_name, verbosity):
         self.connection.closeall()
         return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
-
+    def _create_test_db(self, verbosity, autoclobber):
+        self.connection.closeall()
+        return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
0aff137a210debd9ea18793a98c043a5151d9524
src/Compiler/VM/arithmetic_exprs.py
src/Compiler/VM/arithmetic_exprs.py
from Helpers.string import * def binop_aexp(commands, env, op, left, right): left.compile_vm(commands, env) right.compile_vm(commands, env) if op == '+': value = assemble(Add) elif op == '-': value = assemble(Sub) elif op == '*': value = assemble(Mul) elif op == '/': value = assemble(Div) elif op == '%': value = assemble(Mod) else: raise RuntimeError('unknown operator: ' + op) commands.append(value) def int_aexp(commands, env, i): commands.append(assemble(Push, i)) def var_aexp(commands, env, name): var_type = Environment.get_var_type(env, name) var_value = Environment.get_var(env, name) if var_type == 'IntAexp': commands.append(assemble(Load, var_value)) elif var_type == 'Char': commands.append(assemble(Load, var_value)) elif var_type == 'String': String.compile_get(commands, env, var_value)
from Helpers.string import * def binop_aexp(commands, env, op, left, right): left.compile_vm(commands, env) right.compile_vm(commands, env) if op == '+': value = assemble(Add) elif op == '-': value = assemble(Sub) elif op == '*': value = assemble(Mul) elif op == '/': value = assemble(Div) elif op == '%': value = assemble(Mod) else: raise RuntimeError('unknown operator: ' + op) commands.append(value) def int_aexp(commands, env, i): commands.append(assemble(Push, i)) def var_aexp(commands, env, name): var_type = Environment.get_var_type(env, name) var_value = Environment.get_var(env, name) if var_type == 'String': String.compile_get(commands, env, var_value) else: commands.append(assemble(Load, var_value))
Fix compiling problem for runtime variables
Fix compiling problem for runtime variables
Python
mit
PetukhovVictor/compiler,PetukhovVictor/compiler
---
+++
@@ -23,9 +23,7 @@
 def var_aexp(commands, env, name):
     var_type = Environment.get_var_type(env, name)
     var_value = Environment.get_var(env, name)
-    if var_type == 'IntAexp':
+    if var_type == 'String':
+        String.compile_get(commands, env, var_value)
+    else:
         commands.append(assemble(Load, var_value))
-    elif var_type == 'Char':
-        commands.append(assemble(Load, var_value))
-    elif var_type == 'String':
-        String.compile_get(commands, env, var_value)
be9614da32a3c626d2a8e434a43d411d30451f7f
mopidy/__main__.py
mopidy/__main__.py
import asyncore import logging import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))) from mopidy import get_class, settings, SettingsError from mopidy.mpd.server import MpdServer logger = logging.getLogger('mopidy') def main(): _setup_logging(2) mixer = get_class(settings.MIXER)() backend = get_class(settings.BACKENDS[0])(mixer=mixer) MpdServer(backend=backend) asyncore.loop() def _setup_logging(verbosity_level): if verbosity_level == 0: level = logging.WARNING elif verbosity_level == 2: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( format=settings.CONSOLE_LOG_FORMAT, level=level, ) if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit('\nInterrupted by user') except SettingsError, e: sys.exit('%s' % e)
import asyncore import logging import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))) from mopidy import get_class, settings, SettingsError from mopidy.mpd.server import MpdServer logger = logging.getLogger('mopidy') def main(): _setup_logging(2) # multiprocessing branch plan # --------------------------- # # TODO Init backend in new Process (named core?) # TODO Init mixer from backend # TODO Init MpdHandler from backend/core # TODO Init MpdServer in MainThread or in new Process? mixer = get_class(settings.MIXER)() backend = get_class(settings.BACKENDS[0])(mixer=mixer) MpdServer(backend=backend) asyncore.loop() def _setup_logging(verbosity_level): if verbosity_level == 0: level = logging.WARNING elif verbosity_level == 2: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( format=settings.CONSOLE_LOG_FORMAT, level=level, ) if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit('\nInterrupted by user') except SettingsError, e: sys.exit('%s' % e)
Add todo list for multiprocessing branch
Add todo list for multiprocessing branch
Python
apache-2.0
quartz55/mopidy,mopidy/mopidy,jmarsik/mopidy,tkem/mopidy,quartz55/mopidy,rawdlite/mopidy,mopidy/mopidy,ali/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,dbrgn/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,quartz55/mopidy,vrs01/mopidy,dbrgn/mopidy,mokieyue/mopidy,kingosticks/mopidy,tkem/mopidy,abarisain/mopidy,quartz55/mopidy,priestd09/mopidy,jcass77/mopidy,rawdlite/mopidy,bencevans/mopidy,rawdlite/mopidy,bencevans/mopidy,kingosticks/mopidy,diandiankan/mopidy,jodal/mopidy,mokieyue/mopidy,woutervanwijk/mopidy,vrs01/mopidy,mokieyue/mopidy,tkem/mopidy,hkariti/mopidy,bencevans/mopidy,bacontext/mopidy,dbrgn/mopidy,swak/mopidy,jmarsik/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,rawdlite/mopidy,pacificIT/mopidy,adamcik/mopidy,glogiotatidis/mopidy,adamcik/mopidy,ZenithDK/mopidy,diandiankan/mopidy,hkariti/mopidy,mopidy/mopidy,ali/mopidy,mokieyue/mopidy,adamcik/mopidy,liamw9534/mopidy,jodal/mopidy,bacontext/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,jmarsik/mopidy,abarisain/mopidy,glogiotatidis/mopidy,swak/mopidy,tkem/mopidy,SuperStarPL/mopidy,jodal/mopidy,hkariti/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,bacontext/mopidy,priestd09/mopidy,jcass77/mopidy,bacontext/mopidy,ZenithDK/mopidy,vrs01/mopidy,liamw9534/mopidy,ali/mopidy,ali/mopidy,ZenithDK/mopidy,jcass77/mopidy,dbrgn/mopidy,glogiotatidis/mopidy,swak/mopidy,SuperStarPL/mopidy,bencevans/mopidy,hkariti/mopidy,jmarsik/mopidy,priestd09/mopidy,swak/mopidy
---
+++
@@ -13,6 +13,15 @@

 def main():
     _setup_logging(2)
+
+    # multiprocessing branch plan
+    # ---------------------------
+    #
+    # TODO Init backend in new Process (named core?)
+    # TODO Init mixer from backend
+    # TODO Init MpdHandler from backend/core
+    # TODO Init MpdServer in MainThread or in new Process?
+
     mixer = get_class(settings.MIXER)()
     backend = get_class(settings.BACKENDS[0])(mixer=mixer)
     MpdServer(backend=backend)
c3bac71b19842d9010390996c094119ed25566ab
class_namespaces/scope_proxy.py
class_namespaces/scope_proxy.py
"""Base class for Namespace proxies in class creation.""" import weakref from . import ops from .proxy import _Proxy _PROXY_INFOS = weakref.WeakKeyDictionary() class _ScopeProxy(_Proxy): """Proxy object for manipulating namespaces during class creation.""" __slots__ = '__weakref__', def __init__(self, dct, container): _PROXY_INFOS[self] = container container[self] = dct def __dir__(self): # This line will fire if dir(ns) is done during class creation. return _PROXY_INFOS[self][self] def __getattribute__(self, name): dct = _PROXY_INFOS[self][self] try: return dct[name] # These lines will fire if a non-existent namespace attribute is gotten # during class creation. except KeyError: raise AttributeError(name) def __setattr__(self, name, value): _PROXY_INFOS[self][self][name] = value def __delattr__(self, name): ops.delete(_PROXY_INFOS[self][self], name) def __enter__(self): return _PROXY_INFOS[self][self].__enter__() def __exit__(self, exc_type, exc_value, traceback): return _PROXY_INFOS[self][self].__exit__( exc_type, exc_value, traceback)
"""Base class for Namespace proxies in class creation.""" import weakref from . import ops from .proxy import _Proxy _PROXY_INFOS = weakref.WeakKeyDictionary() class _ScopeProxy(_Proxy): """Proxy object for manipulating namespaces during class creation.""" __slots__ = '__weakref__', def __init__(self, dct, container): _PROXY_INFOS[self] = container container[self] = dct def __dir__(self): # This line will fire if dir(ns) is done during class creation. return _PROXY_INFOS[self][self] def __getattribute__(self, name): # Have to add some dependencies back... from .namespaces import Namespace dct = _PROXY_INFOS[self][self] try: value = dct[name] # These lines will fire if a non-existent namespace attribute is gotten # during class creation. except KeyError: raise AttributeError(name) if isinstance(value, Namespace): value = type(self)(value) return value def __setattr__(self, name, value): _PROXY_INFOS[self][self][name] = value def __delattr__(self, name): ops.delete(_PROXY_INFOS[self][self], name) def __enter__(self): return _PROXY_INFOS[self][self].__enter__() def __exit__(self, exc_type, exc_value, traceback): return _PROXY_INFOS[self][self].__exit__( exc_type, exc_value, traceback)
Fix for bug. Overall somewhat unfortunate.
Fix for bug. Overall somewhat unfortunate.
Python
mit
mwchase/class-namespaces,mwchase/class-namespaces
--- +++ @@ -23,13 +23,18 @@ return _PROXY_INFOS[self][self] def __getattribute__(self, name): + # Have to add some dependencies back... + from .namespaces import Namespace dct = _PROXY_INFOS[self][self] try: - return dct[name] + value = dct[name] # These lines will fire if a non-existent namespace attribute is gotten # during class creation. except KeyError: raise AttributeError(name) + if isinstance(value, Namespace): + value = type(self)(value) + return value def __setattr__(self, name, value): _PROXY_INFOS[self][self][name] = value
2e95901ee37100f855a5f30e6143920ef2b56904
odinweb/_compat.py
odinweb/_compat.py
# -*- coding: utf-8 -*- """ Py27 Support ~~~~~~~~~~~~ Like odin this library will support Python 2.7 through to version 2.0. From this point onwards Python 3.5+ will be required. """ from __future__ import unicode_literals import sys __all__ = ( 'PY2', 'PY3', 'string_types', 'integer_types', 'text_type', 'binary_type', 'range', 'with_metaclass' ) PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY3: string_types = str, integer_types = int, text_type = str binary_type = bytes else: string_types = basestring, integer_types = (int, long) text_type = unicode binary_type = str if PY2: range = xrange else: range = range def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {})
# -*- coding: utf-8 -*- """ Py27 Support ~~~~~~~~~~~~ Like odin this library will support Python 2.7 through to version 2.0. From this point onwards Python 3.5+ will be required. """ import sys __all__ = ( 'PY2', 'PY3', 'string_types', 'integer_types', 'text_type', 'binary_type', 'range', 'with_metaclass' ) PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY3: string_types = str, integer_types = int, text_type = str binary_type = bytes else: string_types = basestring, integer_types = (int, long) text_type = unicode binary_type = str if PY2: range = xrange else: range = range def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {})
Remove unicode literals to fix with_metaclass method
Remove unicode literals to fix with_metaclass method
Python
bsd-3-clause
python-odin/odinweb,python-odin/odinweb
--- +++ @@ -8,8 +8,6 @@ From this point onwards Python 3.5+ will be required. """ -from __future__ import unicode_literals - import sys __all__ = (
da59d4334eb1a6f77bd0a9599614a6289ef843e4
pytest-server-fixtures/tests/integration/test_mongo_server.py
pytest-server-fixtures/tests/integration/test_mongo_server.py
import pytest def test_mongo_server(mongo_server): assert mongo_server.check_server_up() assert mongo_server.delete mongo_server.api.db.test.insert_one({'a': 'b', 'c': 'd'}) assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'} @pytest.mark.parametrize('count', range(3)) def test_mongo_server_multi(count, mongo_server): coll = mongo_server.api.some_database.some_collection assert coll.count_documents({}) == 0 coll.insert_one({'a': 'b'}) assert coll.count_documents({}) == 1
import pytest def test_mongo_server(mongo_server): assert mongo_server.check_server_up() assert mongo_server.delete mongo_server.api.db.test.insert({'a': 'b', 'c': 'd'}) assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'} @pytest.mark.parametrize('count', range(3)) def test_mongo_server_multi(count, mongo_server): coll = mongo_server.api.some_database.some_collection assert coll.count() == 0 coll.insert({'a': 'b'}) assert coll.count() == 1
Revert "fix deprecation warnings in mongo"
Revert "fix deprecation warnings in mongo" This reverts commit 5d449ff9376e7c0a3c78f2b2d631ab0ecd08fe81.
Python
mit
manahl/pytest-plugins,manahl/pytest-plugins
--- +++ @@ -4,13 +4,13 @@ def test_mongo_server(mongo_server): assert mongo_server.check_server_up() assert mongo_server.delete - mongo_server.api.db.test.insert_one({'a': 'b', 'c': 'd'}) + mongo_server.api.db.test.insert({'a': 'b', 'c': 'd'}) assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'} @pytest.mark.parametrize('count', range(3)) def test_mongo_server_multi(count, mongo_server): coll = mongo_server.api.some_database.some_collection - assert coll.count_documents({}) == 0 - coll.insert_one({'a': 'b'}) - assert coll.count_documents({}) == 1 + assert coll.count() == 0 + coll.insert({'a': 'b'}) + assert coll.count() == 1
38365839856658bcf870e286a27f0de784a255a2
test/tools/lldb-mi/TestMiLibraryLoaded.py
test/tools/lldb-mi/TestMiLibraryLoaded.py
""" Test lldb-mi =library-loaded notifications. """ import lldbmi_testcase from lldbtest import * import unittest2 class MiLibraryLoadedTestCase(lldbmi_testcase.MiTestCaseBase): mydir = TestBase.compute_mydir(__file__) @lldbmi_test @expectedFailureWindows("llvm.org/pr22274: need a pexpect replacement for windows") @skipIfFreeBSD # llvm.org/pr22411: Failure presumably due to known thread races def test_lldbmi_library_loaded(self): """Test that 'lldb-mi --interpreter' shows the =library-loaded notifications.""" self.spawnLldbMi(args = None) # Load executable self.runCmd("-file-exec-and-symbols %s" % self.myexe) self.expect("\^done") # Test =library-loaded import os path = os.path.join(os.getcwd(), self.myexe) symbols_path = os.path.join(path + ".dSYM", "Contents", "Resources", "DWARF", self.myexe) self.expect("=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"1\",symbols-path=\"%s\",loaded_addr=\"-\"" % (path, path, path, symbols_path), exactly = True) if __name__ == '__main__': unittest2.main()
""" Test lldb-mi =library-loaded notifications. """ import lldbmi_testcase from lldbtest import * import unittest2 class MiLibraryLoadedTestCase(lldbmi_testcase.MiTestCaseBase): mydir = TestBase.compute_mydir(__file__) @lldbmi_test @expectedFailureWindows("llvm.org/pr22274: need a pexpect replacement for windows") @skipIfFreeBSD # llvm.org/pr22411: Failure presumably due to known thread races def test_lldbmi_library_loaded(self): """Test that 'lldb-mi --interpreter' shows the =library-loaded notifications.""" self.spawnLldbMi(args = None) # Load executable self.runCmd("-file-exec-and-symbols %s" % self.myexe) self.expect("\^done") # Test =library-loaded import os path = os.path.join(os.getcwd(), self.myexe) symbols_path = os.path.join(path + ".dSYM", "Contents", "Resources", "DWARF", self.myexe) self.expect([ "=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"1\",symbols-path=\"%s\",loaded_addr=\"-\"" % (path, path, path, symbols_path), "=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"0\",loaded_addr=\"-\"" % (path, path, path) ], exactly = True) if __name__ == '__main__': unittest2.main()
Fix MiLibraryLoadedTestCase.test_lldbmi_library_loaded test on Linux (MI)
Fix MiLibraryLoadedTestCase.test_lldbmi_library_loaded test on Linux (MI) git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@236229 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb
--- +++ @@ -26,8 +26,10 @@ import os path = os.path.join(os.getcwd(), self.myexe) symbols_path = os.path.join(path + ".dSYM", "Contents", "Resources", "DWARF", self.myexe) - self.expect("=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"1\",symbols-path=\"%s\",loaded_addr=\"-\"" % (path, path, path, symbols_path), - exactly = True) + self.expect([ + "=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"1\",symbols-path=\"%s\",loaded_addr=\"-\"" % (path, path, path, symbols_path), + "=library-loaded,id=\"%s\",target-name=\"%s\",host-name=\"%s\",symbols-loaded=\"0\",loaded_addr=\"-\"" % (path, path, path) + ], exactly = True) if __name__ == '__main__': unittest2.main()
1396a0f39ef46500ce1db499e905c1fca8fa5a5f
tests/bootstrapping/test_bootstrapping.py
tests/bootstrapping/test_bootstrapping.py
import unittest from nala.bootstrapping import generate_documents class TestBootstrapping(unittest.TestCase): def test_generate_documents_number(self): # commenting out for now since it takes about 6 mins on Travis CI test_dataset = generate_documents(1) self.assertEqual(len(test_dataset), 1) pass if __name__ == '__main__': unittest.main()
import unittest from nala.bootstrapping import generate_documents class TestBootstrapping(unittest.TestCase): def test_generate_documents_number(self): # commenting out for now since it takes about 6 mins on Travis CI # test_dataset = generate_documents(1) # self.assertEqual(len(test_dataset), 1) pass if __name__ == '__main__': unittest.main()
Remove long test for now
Remove long test for now
Python
apache-2.0
Rostlab/nalaf
--- +++ @@ -5,8 +5,8 @@ class TestBootstrapping(unittest.TestCase): def test_generate_documents_number(self): # commenting out for now since it takes about 6 mins on Travis CI - test_dataset = generate_documents(1) - self.assertEqual(len(test_dataset), 1) + # test_dataset = generate_documents(1) + # self.assertEqual(len(test_dataset), 1) pass
abe1727600eb1c83c196f9b7bd72e58e4df89c57
feincms/views/base.py
feincms/views/base.py
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.utils import translation from feincms.module.page.models import Page def handler(request, path=None): if path is None: path = request.path page = Page.objects.page_for_path_or_404(path) response = page.setup_request(request) or \ render_to_response(page.template.path, { 'feincms_page': page, }, context_instance=RequestContext(request)) return response
from django.shortcuts import get_object_or_404 from django.contrib.auth.decorators import login_required from django.shortcuts import render_to_response from django.template import RequestContext from feincms.module.page.models import Page def build_page_response(page, request): response = page.setup_request(request) or \ render_to_response(page.template.path, { 'feincms_page': page, }, context_instance=RequestContext(request)) return response def handler(request, path=None): """ This is the default handler for feincms page content. """ if path is None: path = request.path page = Page.objects.page_for_path_or_404(path) return build_page_response(page, request) # XXX Needs more restrictive permissions than just "logged in"!! @login_required def preview_handler(request, page_id): """ This handler is for previewing site content; it takes a page_id so the page is uniquely identified and does not care whether the page is active or expired. To balance that, it requires a logged in user. """ page = get_object_or_404(Page, pk=page_id) return build_page_response(page, request)
Add a handler for previewing content (ignores active, publication dates, etc).
Add a handler for previewing content (ignores active, publication dates, etc).
Python
bsd-3-clause
feincms/feincms,nickburlett/feincms,matthiask/django-content-editor,feincms/feincms,nickburlett/feincms,mjl/feincms,matthiask/feincms2-content,joshuajonah/feincms,joshuajonah/feincms,michaelkuty/feincms,hgrimelid/feincms,pjdelport/feincms,matthiask/feincms2-content,joshuajonah/feincms,nickburlett/feincms,feincms/feincms,matthiask/django-content-editor,joshuajonah/feincms,matthiask/django-content-editor,matthiask/django-content-editor,nickburlett/feincms,michaelkuty/feincms,mjl/feincms,hgrimelid/feincms,pjdelport/feincms,hgrimelid/feincms,michaelkuty/feincms,mjl/feincms,michaelkuty/feincms,matthiask/feincms2-content,pjdelport/feincms
--- +++ @@ -1,20 +1,36 @@ -from django.http import HttpResponseRedirect + +from django.shortcuts import get_object_or_404 +from django.contrib.auth.decorators import login_required from django.shortcuts import render_to_response from django.template import RequestContext -from django.utils import translation from feincms.module.page.models import Page - -def handler(request, path=None): - if path is None: - path = request.path - - page = Page.objects.page_for_path_or_404(path) - +def build_page_response(page, request): response = page.setup_request(request) or \ render_to_response(page.template.path, { 'feincms_page': page, }, context_instance=RequestContext(request)) return response + +def handler(request, path=None): + """ + This is the default handler for feincms page content. + """ + if path is None: + path = request.path + + page = Page.objects.page_for_path_or_404(path) + return build_page_response(page, request) + +# XXX Needs more restrictive permissions than just "logged in"!! +@login_required +def preview_handler(request, page_id): + """ + This handler is for previewing site content; it takes a page_id so + the page is uniquely identified and does not care whether the page + is active or expired. To balance that, it requires a logged in user. + """ + page = get_object_or_404(Page, pk=page_id) + return build_page_response(page, request)
b2d1c701a0f74c569feb3f9e43ccc97366e5398d
backend/django/apps/accounts/serializers.py
backend/django/apps/accounts/serializers.py
from rest_framework import serializers from .models import BaseAccount class WholeAccountSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True, required=False) class Meta: model = BaseAccount fields = ('id', 'first_name', 'last_name', 'email', 'password', 'phone_number', 'address', 'last_activity_at', 'last_activity_at',) def create(self, validated_data): return BaseAccount.objects.create(**validated_data) def update(self, instance, validated_data): return BaseAccount.objects.update_user( instance=instance, validated_data=validated_data)
from rest_framework import serializers from .models import BaseAccount class WholeAccountSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True, required=False) class Meta: model = BaseAccount fields = ('id', 'first_name', 'last_name', 'email', 'password', 'phone_number', 'address', 'last_activity_at', 'last_activity_at',) def create(self, validated_data): return BaseAccount.objects.create_user(**validated_data) def update(self, instance, validated_data): return BaseAccount.objects.update_user( instance=instance, validated_data=validated_data)
Make the serializer use the new create_user method
Make the serializer use the new create_user method
Python
mit
slavpetroff/sweetshop,slavpetroff/sweetshop
--- +++ @@ -12,7 +12,7 @@ 'last_activity_at',) def create(self, validated_data): - return BaseAccount.objects.create(**validated_data) + return BaseAccount.objects.create_user(**validated_data) def update(self, instance, validated_data): return BaseAccount.objects.update_user(
16c71ce44836a3cea877475340cae7f96241fd5d
tests/test_person.py
tests/test_person.py
from copy import copy from unittest import TestCase from address_book import Person class PersonTestCase(TestCase): def test_get_groups(self): pass def test_add_address(self): basic_address = ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'] person = Person( 'John', 'Doe', copy(basic_address), ['+79834772053'], ['john@gmail.com'] ) person.add_address('new address') self.assertEqual( person.addresses, basic_address + ['new address'] ) def test_add_phone(self): basic_phone = ['+79237778492'] person = Person( 'John', 'Doe', ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'], copy(basic_phone), ['john@gmail.com'] ) person.add_phone_number('+79234478810') self.assertEqual( person.phone_numbers, basic_phone + ['+79234478810'] ) def test_add_email(self): pass
from copy import copy from unittest import TestCase from address_book import Person class PersonTestCase(TestCase): def test_get_groups(self): pass def test_add_address(self): basic_address = ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'] person = Person( 'John', 'Doe', copy(basic_address), ['+79834772053'], ['john@gmail.com'] ) person.add_address('new address') self.assertEqual( person.addresses, basic_address + ['new address'] ) def test_add_phone(self): basic_phone = ['+79237778492'] person = Person( 'John', 'Doe', ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'], copy(basic_phone), ['john@gmail.com'] ) person.add_phone_number('+79234478810') self.assertEqual( person.phone_numbers, basic_phone + ['+79234478810'] ) def test_add_email(self): basic_email = ['john@gmail.com'] person = Person( 'John', 'Doe', ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'], ['+79834772053'], copy(basic_email) ) person.add_email('new@mail.net') self.assertEqual( person.emails, basic_phone + ['new@mail.net'] )
Test the ability to append new email address to the person
Test the ability to append new email address to the person
Python
mit
dizpers/python-address-book-assignment
--- +++ @@ -41,4 +41,16 @@ ) def test_add_email(self): - pass + basic_email = ['john@gmail.com'] + person = Person( + 'John', + 'Doe', + ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'], + ['+79834772053'], + copy(basic_email) + ) + person.add_email('new@mail.net') + self.assertEqual( + person.emails, + basic_phone + ['new@mail.net'] + )
2811edf8908c680b80e6534444cdc48feba9af12
base/components/social/youtube/factories.py
base/components/social/youtube/factories.py
import factory from . import models class ChannelFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Channel class VideoFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Video
import factory from . import models class ChannelFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Channel class VideoFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Video channel = factory.SubFactory(ChannelFactory) class ThumbnailFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Thumbnail video = factory.SubFactory(VideoFactory)
Create a factory for Thumbnails.
Create a factory for Thumbnails.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
--- +++ @@ -9,3 +9,11 @@ class VideoFactory(factory.django.DjangoModelFactory): FACTORY_FOR = models.Video + + channel = factory.SubFactory(ChannelFactory) + + +class ThumbnailFactory(factory.django.DjangoModelFactory): + FACTORY_FOR = models.Thumbnail + + video = factory.SubFactory(VideoFactory)
c92d9c6da02dacdd91a21c3c5675940154c0e21a
cla_backend/apps/reports/db/backend/base.py
cla_backend/apps/reports/db/backend/base.py
from django.db.backends.postgresql_psycopg2.base import * # noqa class DynamicTimezoneDatabaseWrapper(DatabaseWrapper): ''' This exists to allow report generation SQL to set the time zone of the connection without interference from Django, which normally tries to ensure that all connections are UTC if `USE_TZ` is `True`. ''' def create_cursor(self): cursor = self.connection.cursor() cursor.tzinfo_factory = None return cursor DatabaseWrapper = DynamicTimezoneDatabaseWrapper
from django.db.backends.postgresql_psycopg2.base import * # noqa import pytz def local_tzinfo_factory(offset): ''' Create a tzinfo object using the offset of the db connection. This ensures that the datetimes returned are timezone aware and will be printed in the reports with timezone information. ''' return pytz.FixedOffset(offset) class DynamicTimezoneDatabaseWrapper(DatabaseWrapper): ''' This exists to allow report generation SQL to set the time zone of the connection without interference from Django, which normally tries to ensure that all connections are UTC if `USE_TZ` is `True`. ''' def create_cursor(self): cursor = self.connection.cursor() cursor.tzinfo_factory = local_tzinfo_factory return cursor DatabaseWrapper = DynamicTimezoneDatabaseWrapper
Add a tzinfo factory method to replica connection to create local tzinfos
Add a tzinfo factory method to replica connection to create local tzinfos This is to ensure that the datetimes returned for report generation are timezone aware and will thus be printed in the reports with timezone information.
Python
mit
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
--- +++ @@ -1,4 +1,14 @@ from django.db.backends.postgresql_psycopg2.base import * # noqa +import pytz + + +def local_tzinfo_factory(offset): + ''' + Create a tzinfo object using the offset of the db connection. This ensures + that the datetimes returned are timezone aware and will be printed in the + reports with timezone information. + ''' + return pytz.FixedOffset(offset) class DynamicTimezoneDatabaseWrapper(DatabaseWrapper): @@ -10,7 +20,7 @@ def create_cursor(self): cursor = self.connection.cursor() - cursor.tzinfo_factory = None + cursor.tzinfo_factory = local_tzinfo_factory return cursor
eb4d278c061de82b0010eb40279e1dcd8408bd45
test_runner/frameworks.py
test_runner/frameworks.py
import json import logging import os import requests from jinja2 import Template from os.path import abspath, dirname, exists, join from .utils import run_cmd LOG = logging.getLogger(__name__) class Framework(object): def __init__(self, environment): self.admin = environment.admin self.guests = environment.guests self.endpoints = environment.endpoints self.images = environment.images self.network = environment.network self.router = environment.router def populate(self): raise NotImplementedError class Tempest(Framework): def __init__(self, environment): super(Tempest, self).__init__(environment) def populate_config(self): LOG.info('Building configuration file') template_dir = join(abspath(dirname(__file__)), 'files/') with open(join(template_dir, 'tempest.conf.example'), 'r') as fp: sample = fp.read() self.config = Template(sample).render( admin=self.admin, guests=self.guests, endpoints=self.endpoints, images=self.images, network=self.network, router=self.router) with open('/opt/tempest/tempest.conf', 'w') as fp: fp.write(self.config)
import json import logging import os import requests from jinja2 import Template from os.path import abspath, dirname, exists, join from .utils import run_cmd LOG = logging.getLogger(__name__) class Framework(object): def __init__(self, environment): self.admin = environment.admin self.guests = environment.guests self.endpoints = environment.endpoints self.images = environment.images self.network = environment.network self.router = environment.router def populate(self): raise NotImplementedError class Tempest(Framework): def __init__(self, environment): super(Tempest, self).__init__(environment) def populate_config(self): LOG.info('Building configuration file') template_dir = join(abspath(dirname(__file__)), 'files/') with open(join(template_dir, 'tempest.conf.example'), 'r') as fp: sample = fp.read() self.config = Template(sample).render( admin=self.admin, guests=self.guests, endpoints=self.endpoints, images=self.images, network=self.network, router=self.router) with open('/etc/tempest/tempest.conf', 'w') as fp: fp.write(self.config)
Change directory of tempest config
Change directory of tempest config
Python
mit
rcbops-qa/test_runner
--- +++ @@ -45,5 +45,5 @@ network=self.network, router=self.router) - with open('/opt/tempest/tempest.conf', 'w') as fp: + with open('/etc/tempest/tempest.conf', 'w') as fp: fp.write(self.config)
d6eb55d2a2107e217935256667d4aef52bd64593
data/hooks/diagnosis/18-mail.py
data/hooks/diagnosis/18-mail.py
#!/usr/bin/env python import os from yunohost.diagnosis import Diagnoser class MailDiagnoser(Diagnoser): id_ = os.path.splitext(os.path.basename(__file__))[0].split("-")[1] cache_duration = 3600 dependencies = ["ip"] def run(self): return # TODO / FIXME TO BE IMPLEMETED in the future ... # Mail blacklist using dig requests (c.f. ljf's code) # Outgoing port 25 (c.f. code in monitor.py, a simple 'nc -zv yunohost.org 25' IIRC) # SMTP reachability (c.f. check-smtp to be implemented on yunohost's remote diagnoser) # ideally, SPF / DMARC / DKIM validation ... (c.f. https://github.com/alexAubin/yunoScripts/blob/master/yunoDKIM.py possibly though that looks horrible) # check that the mail queue is not filled with hundreds of email pending # check that the recent mail logs are not filled with thousand of email sending (unusual number of mail sent) # check for unusual failed sending attempt being refused in the logs ? def main(args, env, loggers): return MailDiagnoser(args, env, loggers).diagnose()
#!/usr/bin/env python import os from yunohost.diagnosis import Diagnoser class MailDiagnoser(Diagnoser): id_ = os.path.splitext(os.path.basename(__file__))[0].split("-")[1] cache_duration = 3600 dependencies = ["ip"] def run(self): # TODO / FIXME TO BE IMPLEMETED in the future ... yield dict(meta={}, status="WARNING", summary=("nothing_implemented_yet", {})) # Mail blacklist using dig requests (c.f. ljf's code) # Outgoing port 25 (c.f. code in monitor.py, a simple 'nc -zv yunohost.org 25' IIRC) # SMTP reachability (c.f. check-smtp to be implemented on yunohost's remote diagnoser) # ideally, SPF / DMARC / DKIM validation ... (c.f. https://github.com/alexAubin/yunoScripts/blob/master/yunoDKIM.py possibly though that looks horrible) # check that the mail queue is not filled with hundreds of email pending # check that the recent mail logs are not filled with thousand of email sending (unusual number of mail sent) # check for unusual failed sending attempt being refused in the logs ? def main(args, env, loggers): return MailDiagnoser(args, env, loggers).diagnose()
Add tmp dummy mail report so that the diagnoser kinda works instead of failing miserably
Add tmp dummy mail report so that the diagnoser kinda works instead of failing miserably
Python
agpl-3.0
YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost
--- +++ @@ -13,7 +13,11 @@ def run(self): - return # TODO / FIXME TO BE IMPLEMETED in the future ... + # TODO / FIXME TO BE IMPLEMETED in the future ... + + yield dict(meta={}, + status="WARNING", + summary=("nothing_implemented_yet", {})) # Mail blacklist using dig requests (c.f. ljf's code)
b72c7dedc1200d95310fb07bfeb6de8cc1663ffb
src/wirecloudcommons/utils/transaction.py
src/wirecloudcommons/utils/transaction.py
from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed from django.db import DEFAULT_DB_ALIAS from django.http import HttpResponse def commit_on_http_success(func, using=None): """ This decorator activates db commit on HTTP success response. This way, if the view function return a success reponse, a commit is made; if the viewfunc produces an exception or return an error response, a rollback is made. """ if using is None: using = DEFAULT_DB_ALIAS def wrapped_func(*args, **kwargs): enter_transaction_management(using=using) managed(True, using=using) try: res = func(*args, **kwargs) except: if is_dirty(using=using): rollback(using=using) raise else: if is_dirty(using=using): if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400: rollback(using=using) else: try: commit(using=using) except: rollback(using=using) raise leave_transaction_management(using=using) return res return wrapped_func
from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed from django.db import DEFAULT_DB_ALIAS from django.http import HttpResponse def commit_on_http_success(func, using=None): """ This decorator activates db commit on HTTP success response. This way, if the view function return a success reponse, a commit is made; if the viewfunc produces an exception or return an error response, a rollback is made. """ if using is None: using = DEFAULT_DB_ALIAS def wrapped_func(*args, **kwargs): enter_transaction_management(using=using) managed(True, using=using) try: res = func(*args, **kwargs) except: if is_dirty(using=using): rollback(using=using) raise else: if is_dirty(using=using): if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400: rollback(using=using) else: try: commit(using=using) except: rollback(using=using) raise finally: leave_transaction_management(using=using) return res return wrapped_func
Fix commit_on_http_success when an exception is raised
Fix commit_on_http_success when an exception is raised
Python
agpl-3.0
jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud
--- +++ @@ -33,8 +33,9 @@ except: rollback(using=using) raise + finally: + leave_transaction_management(using=using) - leave_transaction_management(using=using) return res return wrapped_func
8225105cf2e72560d8c53ec58c1b98683a613381
util/versioncheck.py
util/versioncheck.py
#!/usr/bin/python from subprocess import check_output as co from sys import exit # Actually run bin/mn rather than importing via python path version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True ) version = version.strip() # Find all Mininet path references lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True ) error = False for line in lines.split( '\n' ): if line and 'Binary' not in line: fname, fversion = line.split( ':' ) if version != fversion: print "%s: incorrect version '%s' (should be '%s')" % ( fname, fversion, version ) error = True if error: exit( 1 )
#!/usr/bin/python from subprocess import check_output as co from sys import exit # Actually run bin/mn rather than importing via python path version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True ) version = version.strip() # Find all Mininet path references lines = co( "egrep -or 'Mininet [0-9\.]+\w*' *", shell=True ) error = False for line in lines.split( '\n' ): if line and 'Binary' not in line: fname, fversion = line.split( ':' ) if version != fversion: print "%s: incorrect version '%s' (should be '%s')" % ( fname, fversion, version ) error = True if error: exit( 1 )
Fix to allow more flexible version numbers
Fix to allow more flexible version numbers
Python
bsd-3-clause
mininet/mininet,mininet/mininet,mininet/mininet
--- +++ @@ -8,7 +8,7 @@ version = version.strip() # Find all Mininet path references -lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True ) +lines = co( "egrep -or 'Mininet [0-9\.]+\w*' *", shell=True ) error = False
895d51105cd51387e3ac5db595333ff794f3e2a7
yotta/lib/ordered_json.py
yotta/lib/ordered_json.py
# Copyright 2014 ARM Limited # # Licensed under the Apache License, Version 2.0 # See LICENSE file for details. # standard library modules, , , import json import os import stat from collections import OrderedDict # provide read & write methods for json files that maintain the order of # dictionary keys, and indent consistently # Internals def load(path): with open(path, 'r') as f: # using an ordered dictionary for objects so that we preserve the order # of keys in objects (including, for example, dependencies) return json.load(f, object_pairs_hook=OrderedDict) def dump(path, obj): with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f: os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) json.dump(obj, f, indent=2, separators=(',', ': ')) f.truncate() def loads(string): return json.loads(string, object_pairs_hook=OrderedDict)
# Copyright 2014 ARM Limited # # Licensed under the Apache License, Version 2.0 # See LICENSE file for details. # standard library modules, , , import json import os import stat from collections import OrderedDict # provide read & write methods for json files that maintain the order of # dictionary keys, and indent consistently # Internals def load(path): with open(path, 'r') as f: # using an ordered dictionary for objects so that we preserve the order # of keys in objects (including, for example, dependencies) return json.load(f, object_pairs_hook=OrderedDict) def dump(path, obj): with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f: os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) json.dump(obj, f, indent=2, separators=(',', ': ')) f.write(u'\n') f.truncate() def loads(string): return json.loads(string, object_pairs_hook=OrderedDict)
Add a newline at the end of json files when writing them.
Add a newline at the end of json files when writing them. This fixes the really irritating ping-pong of newline/nonewline when editing json files with an editor, and with `yotta version` commands.
Python
apache-2.0
BlackstoneEngineering/yotta,autopulated/yotta,ARMmbed/yotta,stevenewey/yotta,ARMmbed/yotta,autopulated/yotta,ntoll/yotta,BlackstoneEngineering/yotta,stevenewey/yotta,eyeye/yotta,ntoll/yotta,eyeye/yotta
--- +++ @@ -23,6 +23,7 @@ with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f: os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) json.dump(obj, f, indent=2, separators=(',', ': ')) + f.write(u'\n') f.truncate() def loads(string):
3750bc289685fc243788ee3330676ef7e4387234
apps/accounts/tests/test_user_landing.py
apps/accounts/tests/test_user_landing.py
# (c) Crown Owned Copyright, 2016. Dstl. from django.core.urlresolvers import reverse from django_webtest import WebTest class KeycloakHeaderLandAtHome(WebTest): def test_auto_login_on_landing(self): headers = { 'KEYCLOAK_USERNAME' : 'user@0001.com' } response = self.app.get(reverse('home'), headers=headers) self.assertEqual(reverse('link-list'), response.location)
# (c) Crown Owned Copyright, 2016. Dstl. from django.core.urlresolvers import reverse from django_webtest import WebTest class KeycloakHeaderLandAtHome(WebTest): def test_auto_login_on_landing(self): headers = { 'KEYCLOAK_USERNAME' : 'user@0001.com' } response = self.app.get(reverse('home'), headers=headers) self.assertEqual('http://localhost:80/links', response.location)
Test for landing login now asserts against the correct path.
Test for landing login now asserts against the correct path.
Python
mit
dstl/lighthouse,dstl/lighthouse,dstl/lighthouse,dstl/lighthouse,dstl/lighthouse
--- +++ @@ -8,5 +8,5 @@ def test_auto_login_on_landing(self): headers = { 'KEYCLOAK_USERNAME' : 'user@0001.com' } response = self.app.get(reverse('home'), headers=headers) - self.assertEqual(reverse('link-list'), response.location) + self.assertEqual('http://localhost:80/links', response.location)
65f069c82beea8e96bce780add4f6c3637a0d549
challenge_3/python/ning/challenge_3.py
challenge_3/python/ning/challenge_3.py
def find_majority(sequence): item_counter = dict() for item in sequence: if item not in item_counter: item_counter[item] = 1 else: item_counter[item] += 1 for item, item_count in item_counter.items(): if item_count > len(sequence) / 2: return item test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7] print(find_majority(test_sequence_list))
def find_majority(sequence): item_counter = dict() for item in sequence: if item not in item_counter: item_counter[item] = 1 else: item_counter[item] += 1 if item_counter[item] > len(sequence) / 2: return item test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7] print(find_majority(test_sequence_list))
Include majority check in first loop rather than separate loop
Include majority check in first loop rather than separate loop
Python
mit
mindm/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges
--- +++ @@ -6,10 +6,8 @@ item_counter[item] = 1 else: item_counter[item] += 1 - - for item, item_count in item_counter.items(): - if item_count > len(sequence) / 2: - return item + if item_counter[item] > len(sequence) / 2: + return item test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
fb027f075c3745c5b14a5c611063d161a47f60e4
oidc_apis/id_token.py
oidc_apis/id_token.py
import inspect from .scopes import get_userinfo_by_scopes def process_id_token(payload, user, scope=None): if scope is None: # HACK: Steal the scope argument from the locals dictionary of # the caller, since it was not passed to us scope = inspect.stack()[1][0].f_locals.get('scope', []) payload.update(get_userinfo_by_scopes(user, scope)) payload['preferred_username'] = user.username return payload
import inspect from .scopes import get_userinfo_by_scopes def process_id_token(payload, user, scope=None): if scope is None: # HACK: Steal the scope argument from the locals dictionary of # the caller, since it was not passed to us scope = inspect.stack()[1][0].f_locals.get('scope', []) payload.update(get_userinfo_by_scopes(user, scope)) return payload
Revert "Add username to ID Token"
Revert "Add username to ID Token" This reverts commit 6e1126fe9a8269ff4489ee338000afc852bce922.
Python
mit
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
--- +++ @@ -10,5 +10,4 @@ scope = inspect.stack()[1][0].f_locals.get('scope', []) payload.update(get_userinfo_by_scopes(user, scope)) - payload['preferred_username'] = user.username return payload
91fa1a8eec10b83aa5142d9519a3759b4e310cff
bluebottle/test/factory_models/accounts.py
bluebottle/test/factory_models/accounts.py
from builtins import object import factory from django.contrib.auth.models import Group from bluebottle.members.models import Member class BlueBottleUserFactory(factory.DjangoModelFactory): class Meta(object): model = Member username = factory.Faker('email') email = factory.Faker('email') first_name = factory.Sequence(lambda f: u'user_{0}'.format(f)) last_name = factory.Sequence(lambda l: u'user_{0}'.format(l)) is_active = True is_staff = False is_superuser = False @classmethod def _create(cls, model_class, *args, **kwargs): user = super(BlueBottleUserFactory, cls)._create(model_class, *args, **kwargs) # ensure the raw password gets set after the initial save password = kwargs.pop("password", None) if password: user.set_password(password) user.save() return user class GroupFactory(factory.DjangoModelFactory): class Meta(object): model = Group name = factory.Sequence(lambda n: u'group_{0}'.format(n))
from builtins import object import factory from django.contrib.auth.models import Group from bluebottle.members.models import Member class BlueBottleUserFactory(factory.DjangoModelFactory): class Meta(object): model = Member username = factory.Sequence(lambda n: u'user_{0}'.format(n)) email = factory.Sequence(lambda o: u'user_{0}@onepercentclub.com'.format(o)) first_name = factory.Sequence(lambda f: u'user_{0}'.format(f)) last_name = factory.Sequence(lambda l: u'user_{0}'.format(l)) is_active = True is_staff = False is_superuser = False @classmethod def _create(cls, model_class, *args, **kwargs): user = super(BlueBottleUserFactory, cls)._create(model_class, *args, **kwargs) # ensure the raw password gets set after the initial save password = kwargs.pop("password", None) if password: user.set_password(password) user.save() return user class GroupFactory(factory.DjangoModelFactory): class Meta(object): model = Group name = factory.Sequence(lambda n: u'group_{0}'.format(n))
Fix duplicate users during tests
Fix duplicate users during tests
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
--- +++ @@ -10,8 +10,8 @@ class Meta(object): model = Member - username = factory.Faker('email') - email = factory.Faker('email') + username = factory.Sequence(lambda n: u'user_{0}'.format(n)) + email = factory.Sequence(lambda o: u'user_{0}@onepercentclub.com'.format(o)) first_name = factory.Sequence(lambda f: u'user_{0}'.format(f)) last_name = factory.Sequence(lambda l: u'user_{0}'.format(l)) is_active = True
4b6c27e02667fe6f5208b5b5dfa1f5dafe112c30
luigi/tasks/export/search/chunk.py
luigi/tasks/export/search/chunk.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import luigi from luigi import LocalTarget from tasks.config import db from tasks.config import output from rnacentral.db import cursor from rnacentral.search import exporter class SearchChunkTask(luigi.Task): # pylint: disable=R0904 """ This is a task that will create an xml export for the given range of UPI's. """ min = luigi.IntParameter() max = luigi.IntParameter() def output(self): config = output() filepattern = 'xml_export_{min}_{max}.xml'.format( min=self.min, max=self.max, ) filename = os.path.join(config.search_files, filepattern) return LocalTarget(filename) def run(self): with self.output().open('w') as raw: with cursor(db()) as cur: results = exporter.range(cur, self.min, self.max) exporter.write(raw, results)
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import luigi from luigi import LocalTarget from tasks.config import db from tasks.config import output from rnacentral.search import exporter class SearchChunkTask(luigi.Task): # pylint: disable=R0904 """ This is a task that will create an xml export for the given range of UPI's. """ min = luigi.IntParameter() max = luigi.IntParameter() def output(self): config = output() filepattern = 'xml_export_{min}_{max}.xml'.format( min=self.min, max=self.max, ) filename = os.path.join(config.search_files, filepattern) return LocalTarget(filename) def run(self): with self.output().open('w') as raw: results = exporter.range(db(), self.min, self.max) exporter.write(raw, results)
Use new psql based export
Use new psql based export This changes the function arguments to use.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
--- +++ @@ -21,7 +21,6 @@ from tasks.config import db from tasks.config import output -from rnacentral.db import cursor from rnacentral.search import exporter @@ -44,6 +43,5 @@ def run(self): with self.output().open('w') as raw: - with cursor(db()) as cur: - results = exporter.range(cur, self.min, self.max) - exporter.write(raw, results) + results = exporter.range(db(), self.min, self.max) + exporter.write(raw, results)
4cd345aa9a6b642af60529a94f047cf32847e262
rplugin/python3/denite/source/gtags_path.py
rplugin/python3/denite/source/gtags_path.py
from .gtags_def import GtagsBase class Source(GtagsBase): def __init__(self, vim): super().__init__(vim) self.name = 'gtags_path' self.kind = 'file' @classmethod def get_search_flags(cls): return ['-P'] def gather_candidates(self, context): tags = self.exec_global(self.get_search_flags()) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): return [{'word': t, 'action__path': t} for t in tags]
from .gtags_def import GtagsBase class Source(GtagsBase): def __init__(self, vim): super().__init__(vim) self.name = 'gtags_path' self.kind = 'file' @classmethod def get_search_flags(cls): return ['-P'] def gather_candidates(self, context): tags = self.exec_global(self.get_search_flags(), context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): return [{'word': t, 'action__path': t} for t in tags]
Fix missing parameter in path source
Fix missing parameter in path source
Python
mit
ozelentok/denite-gtags
--- +++ @@ -13,7 +13,7 @@ return ['-P'] def gather_candidates(self, context): - tags = self.exec_global(self.get_search_flags()) + tags = self.exec_global(self.get_search_flags(), context) candidates = self._convert_to_candidates(tags) return candidates
71aa5a7d665a432b6a01ec08bfba92e50d7fc29d
badgekit_webhooks/claimcode_views.py
badgekit_webhooks/claimcode_views.py
from __future__ import unicode_literals from django.views.generic.edit import FormView from .forms import SendClaimCodeForm from .models import Badge class SendClaimCodeView(FormView): template_name = 'badgekit_webhooks/send_claim_code.html' form_class = SendClaimCodeForm success_url = '/' # TODO def form_valid(self, form): self.send_claim_mail(form) return super(SendClaimCodeView, self).form_valid(form) def send_claim_mail(self, form): code = Badge.create_claim_code( form.cleaned_data['badge'], form.cleaned_data['awardee']) print(code) # TODO: send the code in an email, etc.
from __future__ import unicode_literals from django.views.generic.edit import FormMixin from django.views.generic.base import View, TemplateResponseMixin from .forms import SendClaimCodeForm from .models import Badge # This view starts as a copy of django.views.generic.edit.ProcessFormView class SendClaimCodeView(TemplateResponseMixin, FormMixin, View): template_name = 'badgekit_webhooks/send_claim_code.html' form_class = SendClaimCodeForm success_url = '/' # TODO def get(self, request, *args, **kwargs): """ Handles GET requests and instantiates a blank version of the form. """ form_class = self.get_form_class() form = self.get_form(form_class) return self.render_to_response(self.get_context_data(form=form)) def post(self, request, *args, **kwargs): """ Handles POST requests, instantiating a form instance with the passed POST variables and then checked for validity. """ form_class = self.get_form_class() form = self.get_form(form_class) if form.is_valid(): return self.form_valid(form) else: return self.form_invalid(form) # PUT is a valid HTTP verb for creating (with a known URL) or editing an # object, note that browsers only support POST for now. def put(self, *args, **kwargs): return self.post(*args, **kwargs) def form_valid(self, form): self.send_claim_mail(form) return super(SendClaimCodeView, self).form_valid(form) def send_claim_mail(self, form): # if the code doesn't work, tell the admin so? code = Badge.create_claim_code( form.cleaned_data['badge'], form.cleaned_data['awardee']) print(code) # TODO: send the code in an email, etc.
Refactor SendClaimCodeView to be more explicit, will then modify
Refactor SendClaimCodeView to be more explicit, will then modify
Python
mit
tgs/django-badgekit-webhooks
--- +++ @@ -1,18 +1,46 @@ from __future__ import unicode_literals -from django.views.generic.edit import FormView +from django.views.generic.edit import FormMixin +from django.views.generic.base import View, TemplateResponseMixin from .forms import SendClaimCodeForm from .models import Badge -class SendClaimCodeView(FormView): +# This view starts as a copy of django.views.generic.edit.ProcessFormView +class SendClaimCodeView(TemplateResponseMixin, FormMixin, View): template_name = 'badgekit_webhooks/send_claim_code.html' form_class = SendClaimCodeForm success_url = '/' # TODO + + def get(self, request, *args, **kwargs): + """ + Handles GET requests and instantiates a blank version of the form. + """ + form_class = self.get_form_class() + form = self.get_form(form_class) + return self.render_to_response(self.get_context_data(form=form)) + + def post(self, request, *args, **kwargs): + """ + Handles POST requests, instantiating a form instance with the passed + POST variables and then checked for validity. + """ + form_class = self.get_form_class() + form = self.get_form(form_class) + if form.is_valid(): + return self.form_valid(form) + else: + return self.form_invalid(form) + + # PUT is a valid HTTP verb for creating (with a known URL) or editing an + # object, note that browsers only support POST for now. + def put(self, *args, **kwargs): + return self.post(*args, **kwargs) def form_valid(self, form): self.send_claim_mail(form) return super(SendClaimCodeView, self).form_valid(form) def send_claim_mail(self, form): + # if the code doesn't work, tell the admin so? code = Badge.create_claim_code( form.cleaned_data['badge'], form.cleaned_data['awardee'])
6d6b43ef861451e4d94acc5d7821b19734e60673
apps/local_apps/account/context_processors.py
apps/local_apps/account/context_processors.py
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): account = AnonymousAccount(request) if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = None return {'account': account}
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account}
Handle the exception case in the account context_processor.
Handle the exception case in the account context_processor.
Python
mit
ingenieroariel/pinax,ingenieroariel/pinax
--- +++ @@ -5,10 +5,11 @@ return {'openid': request.openid} def account(request): - account = AnonymousAccount(request) if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): - account = None + account = AnonymousAccount(request) + else: + account = AnonymousAccount(request) return {'account': account}
581285a6d887ab9beef3e2014db7f259109d1b5a
exercises/ex20.py
exercises/ex20.py
#!/usr/bin/python def translate(translation_list): trans_dict = {"merry":"god", "christmas":"jul", "and":"och", "happy":"gott", "new":"nytt", "year":"år"} final_translation_list = [] for word in translation_list: if word in trans_dict: final_translation_list.append(trans_dict[word]) return(final_translation_list)
# -*- coding: utf-8 -*- #!/usr/bin/python def translate(translation_list): trans_dict = {"merry":"god", "christmas":"jul", "and":"och", "happy":"gott", "new":"nytt", "year":"år"} final_translation_list = [] for word in translation_list: if word in trans_dict: final_translation_list.append(trans_dict[word]) return(final_translation_list)
Add some utf-8 encoding due to special chars.
Add some utf-8 encoding due to special chars.
Python
mit
gravyboat/python-exercises
--- +++ @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- #!/usr/bin/python
b8ffeaa3274ead2c437526d507264af219929b61
gerrit/plugins.py
gerrit/plugins.py
# -*- coding: utf-8 -*- URLS = { } class Plugins(object): """ This class provide plugin-related methods Plugin related REST endpoints: https://gerrit-review.googlesource.com/Documentation/rest-api-plugin.html """ def __init__(self, gerrit): self.gerrit = gerrit self.gerrit.URLS.update(URLS)
# -*- coding: utf-8 -*- URLS = { 'LIST': 'plugins/', 'INSTALL': 'plugins/%(plugin_id)s', 'STATUS': 'plugins/%(plugin_id)s/gerrit~status', 'ENABLE': 'plugins/%(plugin_id)s/gerrit~enable', 'DISABLE': 'plugins/%(plugin_id)s/gerrit~disable', 'RELOAD': 'plugins/%(plugin_id)s/gerrit~reload', } class Plugins(object): """ This class provide plugin-related methods Plugin related REST endpoints: https://gerrit-review.googlesource.com/Documentation/rest-api-plugin.html """ def __init__(self, gerrit): self.gerrit = gerrit self.gerrit.URLS.update(URLS) def list(self, list_all=False): """Lists the plugins installed on the Gerrit server. Only the enabled plugins are returned unless the all option is specified.""" url = self.gerrit.url('LIST') if list_all: url += '?all' r = Request(method='GET', url=url, auth=self.gerrit.auth) return self.gerrit.dispatch(r) def install(self, plugin_id): """Installs a new plugin on the Gerrit server. """ # TODO: install pass def status(self, plugin_id): """Retrieves the status of a plugin on the Gerrit server.""" url = self.gerrit.url('STATUS', plugin_id=plugin_id) r = Request(method='GET', url=url, auth=self.gerrit.auth) return self.gerrit.dispatch(r) def enable(self, plugin_id): """Enables a plugin on the Gerrit server.""" url = self.gerrit.url('ENABLE', plugin_id=plugin_id) r = Request(method='POST', url=url, auth=self.gerrit.auth) return self.gerrit.dispatch(r) def disable(self, plugin_id): """Disables a plugin on the Gerrit server.""" url = self.gerrit.url('DISABLE', plugin_id=plugin_id) r = Request(method='POST', url=url, auth=self.gerrit.auth) return self.gerrit.dispatch(r) def reload(self, plugin_id): """Reloads a plugin on the Gerrit server.""" url = self.gerrit.url('RELOAD', plugin_id=plugin_id) r = Request(method='POST', url=url, auth=self.gerrit.auth) return self.gerrit.dispatch(r)
Add methods for Plugin Endpoints
Add methods for Plugin Endpoints Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com>
Python
apache-2.0
yumminhuang/gerrit.py
--- +++ @@ -1,6 +1,12 @@ # -*- coding: utf-8 -*- URLS = { + 'LIST': 'plugins/', + 'INSTALL': 'plugins/%(plugin_id)s', + 'STATUS': 'plugins/%(plugin_id)s/gerrit~status', + 'ENABLE': 'plugins/%(plugin_id)s/gerrit~enable', + 'DISABLE': 'plugins/%(plugin_id)s/gerrit~disable', + 'RELOAD': 'plugins/%(plugin_id)s/gerrit~reload', } @@ -13,3 +19,43 @@ def __init__(self, gerrit): self.gerrit = gerrit self.gerrit.URLS.update(URLS) + + def list(self, list_all=False): + """Lists the plugins installed on the Gerrit server. + Only the enabled plugins are returned + unless the all option is specified.""" + url = self.gerrit.url('LIST') + if list_all: + url += '?all' + r = Request(method='GET', url=url, auth=self.gerrit.auth) + return self.gerrit.dispatch(r) + + def install(self, plugin_id): + """Installs a new plugin on the Gerrit server. + """ + # TODO: install + pass + + def status(self, plugin_id): + """Retrieves the status of a plugin on the Gerrit server.""" + url = self.gerrit.url('STATUS', plugin_id=plugin_id) + r = Request(method='GET', url=url, auth=self.gerrit.auth) + return self.gerrit.dispatch(r) + + def enable(self, plugin_id): + """Enables a plugin on the Gerrit server.""" + url = self.gerrit.url('ENABLE', plugin_id=plugin_id) + r = Request(method='POST', url=url, auth=self.gerrit.auth) + return self.gerrit.dispatch(r) + + def disable(self, plugin_id): + """Disables a plugin on the Gerrit server.""" + url = self.gerrit.url('DISABLE', plugin_id=plugin_id) + r = Request(method='POST', url=url, auth=self.gerrit.auth) + return self.gerrit.dispatch(r) + + def reload(self, plugin_id): + """Reloads a plugin on the Gerrit server.""" + url = self.gerrit.url('RELOAD', plugin_id=plugin_id) + r = Request(method='POST', url=url, auth=self.gerrit.auth) + return self.gerrit.dispatch(r)
cf0850e23b07c656bd2bc56c88f9119dc4142931
mooch/banktransfer.py
mooch/banktransfer.py
from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url)
from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def __init__(self, *, autocharge, **kw): self.autocharge = autocharge super(BankTransferMoocher, self).__init__(**kw) def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier if self.autocharge: instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url)
Allow disabling the autocharging behavior of the bank transfer moocher
Allow disabling the autocharging behavior of the bank transfer moocher
Python
mit
matthiask/django-mooch,matthiask/django-mooch,matthiask/django-mooch
--- +++ @@ -12,6 +12,10 @@ class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') + + def __init__(self, *, autocharge, **kw): + self.autocharge = autocharge + super(BankTransferMoocher, self).__init__(**kw) def get_urls(self): return [ @@ -28,7 +32,8 @@ def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier - instance.charged_at = timezone.now() + if self.autocharge: + instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save()
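Aside: the moocher change above makes autocharge a keyword-only argument by placing it after a bare * in the signature. A minimal standalone sketch of that syntax, using hypothetical names unrelated to the mooch package:

class Notifier(object):
    def __init__(self, *, enabled, **kwargs):
        # 'enabled' can only be passed by keyword; Notifier(True) raises TypeError.
        self.enabled = enabled
        self.extra = kwargs


n = Notifier(enabled=False, channel='email')
assert n.enabled is False
assert n.extra == {'channel': 'email'}

Forcing the flag to be spelled out at the call site makes opt-in behaviour like autocharge hard to pass by accident.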
b569ce0ae444e43cf6a64dd034186877cc259e2d
luigi/tasks/export/ftp/fasta/__init__.py
luigi/tasks/export/ftp/fasta/__init__.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from tasks.utils.compress import GenericCompressTask from .active import ActiveFastaExport from .active import SpeciesSpecificFastaExport from .inactive import InactiveFastaExport from .nhmmer import NHmmerIncludedExport from .nhmmer import NHmmerExcludedExport from .readme import FastaReadme class CompressExport(GenericCompressTask): """ This will compress the files the generic FASTA export files. """ def inputs(self): yield ActiveFastaExport() yield InactiveFastaExport() yield SpeciesSpecificFastaExport() def requires(self): for requirement in super(CompressExport, self).requires(): yield requirement yield NHmmerExcludedExport() yield NHmmerIncludedExport() class FastaExport(luigi.WrapperTask): """ This is the main class to generate all FASTA file exports. """ def requires(self): yield FastaReadme() yield NHmmerExport() yield CompressExport() class NHmmerExport(luigi.WrapperTask): """ This does the exports required for nhmmer. """ def requires(self): yield NHmmerExcludedExport() yield NHmmerIncludedExport()
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from .active import ActiveFastaExport from .active import SpeciesSpecificFastaExport from .inactive import InactiveFastaExport from .nhmmer import NHmmerIncludedExport from .nhmmer import NHmmerExcludedExport from .readme import FastaReadme class FastaExport(luigi.WrapperTask): """ This is the main class to generate all FASTA file exports. """ def requires(self): yield FastaReadme() yield ActiveFastaExport() yield InactiveFastaExport() yield SpeciesSpecificFastaExport() yield NHmmerExcludedExport() yield NHmmerIncludedExport()
Clean up main fasta tasks
Clean up main fasta tasks This removes the unneeded compress tasks as well as makes it easier to know what task to run.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
--- +++ @@ -15,31 +15,12 @@ import luigi -from tasks.utils.compress import GenericCompressTask - from .active import ActiveFastaExport from .active import SpeciesSpecificFastaExport from .inactive import InactiveFastaExport from .nhmmer import NHmmerIncludedExport from .nhmmer import NHmmerExcludedExport from .readme import FastaReadme - - -class CompressExport(GenericCompressTask): - """ - This will compress the files the generic FASTA export files. - """ - - def inputs(self): - yield ActiveFastaExport() - yield InactiveFastaExport() - yield SpeciesSpecificFastaExport() - - def requires(self): - for requirement in super(CompressExport, self).requires(): - yield requirement - yield NHmmerExcludedExport() - yield NHmmerIncludedExport() class FastaExport(luigi.WrapperTask): @@ -49,15 +30,8 @@ def requires(self): yield FastaReadme() - yield NHmmerExport() - yield CompressExport() - - -class NHmmerExport(luigi.WrapperTask): - """ - This does the exports required for nhmmer. - """ - - def requires(self): + yield ActiveFastaExport() + yield InactiveFastaExport() + yield SpeciesSpecificFastaExport() yield NHmmerExcludedExport() yield NHmmerIncludedExport()
1ffff2738c4ced2aedb8b63f5c729860aab1bac7
marshmallow_jsonapi/__init__.py
marshmallow_jsonapi/__init__.py
from .schema import Schema, SchemaOpts __version__ = "0.21.2" __author__ = "Steven Loria" __license__ = "MIT" __all__ = ("Schema", "SchemaOpts")
from .schema import Schema, SchemaOpts __version__ = "0.21.2" __all__ = ("Schema", "SchemaOpts")
Remove unnecessary `__author__` and `__license__`
Remove unnecessary `__author__` and `__license__`
Python
mit
marshmallow-code/marshmallow-jsonapi
--- +++ @@ -1,7 +1,4 @@ from .schema import Schema, SchemaOpts __version__ = "0.21.2" -__author__ = "Steven Loria" -__license__ = "MIT" - __all__ = ("Schema", "SchemaOpts")
238b92a561f9285f7030b1dae51a3ffeb3de579e
tests/integrations/test_feedback_service.py
tests/integrations/test_feedback_service.py
# -*- coding: utf-8 -*- import pytest from junction.feedback import service from .. import factories pytestmark = pytest.mark.django_db def test_get_feedback_questions_without_conference(): result = service.get_feedback_questions(conference_id=23) assert result == {} def test_get_feedback_questions_with_conference(): schedule_item_types = ['Workshop', 'Talk'] num_choice_questions = 2 num_text_questions = 1 objects = factories.create_feedback_questions( schedule_item_types=schedule_item_types, num_text_questions=num_text_questions, num_choice_questions=num_choice_questions) conference = objects['conference'] result = service.get_feedback_questions(conference_id=conference.id) assert list(result.keys()) == schedule_item_types for item_type in schedule_item_types: assert len(result[item_type]['text']) == num_text_questions assert len(result[item_type]['choice']) == num_choice_questions
# -*- coding: utf-8 -*- import pytest from junction.feedback import service from .. import factories pytestmark = pytest.mark.django_db def test_get_feedback_questions_without_conference(): result = service.get_feedback_questions(conference_id=23) assert result == {} def test_get_feedback_questions_with_conference(): schedule_item_types = set(['Workshop', 'Talk']) num_choice_questions = 2 num_text_questions = 1 objects = factories.create_feedback_questions( schedule_item_types=schedule_item_types, num_text_questions=num_text_questions, num_choice_questions=num_choice_questions) conference = objects['conference'] result = service.get_feedback_questions(conference_id=conference.id) assert set(result.keys()) == schedule_item_types for item_type in schedule_item_types: assert len(result[item_type]['text']) == num_text_questions assert len(result[item_type]['choice']) == num_choice_questions
Use set to compare list of values
Use set to compare list of values
Python
mit
pythonindia/junction,ChillarAnand/junction,ChillarAnand/junction,pythonindia/junction,ChillarAnand/junction,ChillarAnand/junction,pythonindia/junction,pythonindia/junction
--- +++ @@ -15,7 +15,7 @@ def test_get_feedback_questions_with_conference(): - schedule_item_types = ['Workshop', 'Talk'] + schedule_item_types = set(['Workshop', 'Talk']) num_choice_questions = 2 num_text_questions = 1 objects = factories.create_feedback_questions( @@ -26,7 +26,7 @@ conference = objects['conference'] result = service.get_feedback_questions(conference_id=conference.id) - assert list(result.keys()) == schedule_item_types + assert set(result.keys()) == schedule_item_types for item_type in schedule_item_types: assert len(result[item_type]['text']) == num_text_questions assert len(result[item_type]['choice']) == num_choice_questions
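Aside: the fix above compares dictionary keys as a set so the assertion no longer depends on iteration order. A tiny illustration of why, with made-up values not tied to the junction project:

expected = set(['Workshop', 'Talk'])
result = {'Talk': ['q1'], 'Workshop': ['q2']}

# result.keys() reflects insertion order, which the code under test does not
# promise, so comparing against a fixed list is brittle:
#   list(result.keys()) == ['Workshop', 'Talk']   may fail
# Comparing as sets ignores ordering:
assert set(result.keys()) == expected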
fc7323ddb0b2700ad459a8016fe4e56d9ac8e352
morepath/tests/test_template.py
morepath/tests/test_template.py
import os import morepath from webtest import TestApp as Client import pytest from .fixtures import template def setup_module(module): morepath.disable_implicit() def test_template(): config = morepath.setup() config.scan(template) config.commit() c = Client(template.App()) response = c.get('/world') assert response.body == b'<p>Hello world!</p>\n' def test_template(): config = morepath.setup() class App(morepath.App): testing_config = config @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_engine(extension='.format') def get_format_render(path, original_render, settings): with open(path, 'rb') as f: template = f.read() def render(content, request): return original_render(template.format(**content), request) return render # relative paths don't work inside a test, only in a real # fixture full_template_path = os.path.join(os.path.dirname(__file__), 'templates/person.format') @App.html(model=Person, template=full_template_path) def person_default(self, request): return { 'name': self.name } config.commit() c = Client(App()) response = c.get('/world') assert response.body == b'<p>Hello world!</p>\n'
import os import morepath from webtest import TestApp as Client import pytest from .fixtures import template def setup_module(module): morepath.disable_implicit() def test_template_fixture(): config = morepath.setup() config.scan(template) config.commit() c = Client(template.App()) response = c.get('/world') assert response.body == b'<p>Hello world!</p>\n' def test_template_inline(): config = morepath.setup() class App(morepath.App): testing_config = config @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_engine(extension='.format') def get_format_render(path, original_render, settings): with open(path, 'rb') as f: template = f.read() def render(content, request): return original_render(template.format(**content), request) return render # relative paths don't work inside a test, only in a real # fixture full_template_path = os.path.join(os.path.dirname(__file__), 'templates/person.format') @App.html(model=Person, template=full_template_path) def person_default(self, request): return { 'name': self.name } config.commit() c = Client(App()) response = c.get('/world') assert response.body == b'<p>Hello world!</p>\n'
Rename one test so it actually gets run.
Rename one test so it actually gets run.
Python
bsd-3-clause
morepath/morepath,faassen/morepath,taschini/morepath
--- +++ @@ -9,7 +9,7 @@ morepath.disable_implicit() -def test_template(): +def test_template_fixture(): config = morepath.setup() config.scan(template) config.commit() @@ -19,7 +19,7 @@ assert response.body == b'<p>Hello world!</p>\n' -def test_template(): +def test_template_inline(): config = morepath.setup() class App(morepath.App):
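Aside: the bug fixed above is easy to reproduce. When two module-level functions share a name, the second definition rebinds the name and the first silently disappears, so pytest collects only one of them. A minimal sketch (a hypothetical module, not part of morepath):

def test_template():
    assert 1 + 1 == 2


def test_template():  # noqa: F811 - rebinds the name, the first test vanishes
    assert 2 + 2 == 4


# Only one object called test_template is left in the module namespace,
# so a runner that collects tests by name will execute just one test.
assert [name for name in dir() if name == 'test_template'] == ['test_template']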
63130e4e438c34815bcd669d0088235140a62ced
readthedocs/core/utils/tasks/__init__.py
readthedocs/core/utils/tasks/__init__.py
from .permission_checks import * from .public import * from .retrieve import *
from .permission_checks import user_id_matches from .public import permission_check from .public import get_public_task_data from .retrieve import TaskNotFound from .retrieve import get_task_data
Remove star imports to remove pyflakes errors
Remove star imports to remove pyflakes errors
Python
mit
safwanrahman/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,tddv/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,pombredanne/readthedocs.org,davidfischer/readthedocs.org
--- +++ @@ -1,3 +1,5 @@ -from .permission_checks import * -from .public import * -from .retrieve import * +from .permission_checks import user_id_matches +from .public import permission_check +from .public import get_public_task_data +from .retrieve import TaskNotFound +from .retrieve import get_task_data
cecea26672d74a026383c08ebf26bc72ab2ee66c
pingparsing/_pingtransmitter.py
pingparsing/_pingtransmitter.py
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import from __future__ import unicode_literals import platform import dataproperty class PingTransmitter(object): def __init__(self): self.destination_host = "" self.waittime = 1 self.ping_option = "" def ping(self): import subprocess self.__validate_ping_param() command_list = [ "ping", self.destination_host, ] if dataproperty.is_not_empty_string(self.ping_option): command_list.append(self.ping_option) if platform.system() == "Windows": command_list.append("-n {:d}".format(self.waittime)) else: command_list.append("-q -w {:d}".format(self.waittime)) ping_proc = subprocess.Popen( " ".join(command_list), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = ping_proc.communicate() if ping_proc.returncode != 0: raise RuntimeError(stderr) return stdout def __validate_ping_param(self): if dataproperty.is_empty_string(self.destination_host): raise ValueError("destination_host host is empty") if self.waittime <= 0: raise ValueError( "waittime expected to be greater than or equal to zero")
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import from __future__ import unicode_literals from collections import namedtuple import platform import dataproperty PingResult = namedtuple("PingResult", "stdout stderr returncode") class PingTransmitter(object): def __init__(self): self.destination_host = "" self.waittime = 1 self.ping_option = "" def ping(self): import subprocess self.__validate_ping_param() command_list = [ "ping", self.destination_host, ] if dataproperty.is_not_empty_string(self.ping_option): command_list.append(self.ping_option) if platform.system() == "Windows": command_list.append("-n {:d}".format(self.waittime)) else: command_list.append("-q -w {:d}".format(self.waittime)) ping_proc = subprocess.Popen( " ".join(command_list), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = ping_proc.communicate() return PingResult(stdout, stderr, ping_proc.returncode) def __validate_ping_param(self): if dataproperty.is_empty_string(self.destination_host): raise ValueError("destination_host host is empty") if self.waittime <= 0: raise ValueError( "wait time expected to be greater than or equal to zero")
Change ping result error handling to return values instead of raise exception
Change ping result error handling to return values instead of raise exception
Python
mit
thombashi/pingparsing,thombashi/pingparsing
--- +++ @@ -6,9 +6,13 @@ from __future__ import absolute_import from __future__ import unicode_literals +from collections import namedtuple import platform import dataproperty + + +PingResult = namedtuple("PingResult", "stdout stderr returncode") class PingTransmitter(object): @@ -41,10 +45,7 @@ stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = ping_proc.communicate() - if ping_proc.returncode != 0: - raise RuntimeError(stderr) - - return stdout + return PingResult(stdout, stderr, ping_proc.returncode) def __validate_ping_param(self): if dataproperty.is_empty_string(self.destination_host): @@ -52,4 +53,4 @@ if self.waittime <= 0: raise ValueError( - "waittime expected to be greater than or equal to zero") + "wait time expected to be greater than or equal to zero")
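Aside: the refactor above swaps an exception for a namedtuple so callers can inspect stdout, stderr and the return code themselves. A generic sketch of that "return a result object" pattern, with made-up names that are not the pingparsing API:

from collections import namedtuple
import subprocess

CommandResult = namedtuple('CommandResult', 'stdout stderr returncode')


def run_command(command):
    proc = subprocess.Popen(
        command, shell=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    # No exception on failure: the caller decides what a non-zero
    # return code means in its context.
    return CommandResult(stdout, stderr, proc.returncode)


result = run_command('echo hello')
if result.returncode == 0:
    print(result.stdout)
else:
    print('command failed:', result.stderr)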
9abcfcf971b053a5d3859faefc77a7622c00fc22
gratipay/package_managers/readmes.py
gratipay/package_managers/readmes.py
from __future__ import absolute_import, division, print_function, unicode_literals import sys import requests from gratipay.utils import markdown from gratipay.utils.threaded_map import threaded_map from threading import Lock log_lock = Lock() def log(*a, **kw): with log_lock: print(*a, file=sys.stderr, **kw) def http_fetch(package_name): r = requests.get('https://registry.npmjs.com/' + package_name) if r.status_code != 200: log(r.status_code, 'for', package_name) return None return r.json() def Syncer(db): def sync(dirty, fetch=http_fetch): """Update all info for one package. """ log(dirty.name) full = fetch(dirty.name) if not full: return # try again later assert full['name'] == dirty.name db.run(''' UPDATE packages SET readme=%s , readme_raw=%s , readme_type=%s WHERE package_manager=%s AND name=%s ''', ( markdown.marky(full['readme']) , full['readme'] , 'x-markdown/npm' , dirty.package_manager , dirty.name )) return sync def sync_all(db): dirty = db.all('SELECT package_manager, name FROM packages WHERE readme_raw IS NULL ' 'ORDER BY package_manager DESC, name DESC') threaded_map(Syncer(db), dirty, 10)
from __future__ import absolute_import, division, print_function, unicode_literals import sys import requests from gratipay.utils import markdown from gratipay.utils.threaded_map import threaded_map from threading import Lock log_lock = Lock() def log(*a, **kw): with log_lock: print(*a, file=sys.stderr, **kw) def http_fetch(package_name): r = requests.get('https://registry.npmjs.com/' + package_name) if r.status_code != 200: log(r.status_code, 'for', package_name) return None return r.json() def Syncer(db): def sync(dirty, fetch=http_fetch): """Update all info for one package. """ log(dirty.name) full = fetch(dirty.name) if not full: return # try again later assert full['name'] == dirty.name db.run(''' UPDATE packages SET readme=%s , readme_raw=%s , readme_type=%s WHERE package_manager=%s AND name=%s ''', ( markdown.marky(full['readme']) , full['readme'] , 'x-markdown/npm' , dirty.package_manager , dirty.name )) return sync def sync_all(db): dirty = db.all('SELECT package_manager, name FROM packages WHERE readme_raw IS NULL ' 'ORDER BY package_manager DESC, name DESC') threaded_map(Syncer(db), dirty, 4)
Drop threads down to limit memory consumption
Drop threads down to limit memory consumption
Python
mit
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
--- +++ @@ -56,4 +56,4 @@ def sync_all(db): dirty = db.all('SELECT package_manager, name FROM packages WHERE readme_raw IS NULL ' 'ORDER BY package_manager DESC, name DESC') - threaded_map(Syncer(db), dirty, 10) + threaded_map(Syncer(db), dirty, 4)
760ba24c1413d4a0b7f68d8ce2d6a30df8ad8e4a
mothermayi/git.py
mothermayi/git.py
import contextlib import logging import subprocess LOGGER = logging.getLogger(__name__) def execute(command): LOGGER.debug("Executing %s", ' '.join(command)) proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate(input=None) if proc.returncode != 0: raise Exception("Failed to execute command {}".format(' '.join(command))) LOGGER.debug("stdout %s", stdout) LOGGER.debug("stderr %s", stderr) @contextlib.contextmanager def stash(): execute(['git', 'stash', '-u', '--keep-index']) yield execute(['git', 'reset', '--hard']) execute(['git', 'stash', 'pop', '--quiet', '--index'])
import contextlib import logging import subprocess LOGGER = logging.getLogger(__name__) def execute(command): LOGGER.debug("Executing %s", ' '.join(command)) proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate(input=None) if proc.returncode != 0: raise Exception("Failed to execute command {}".format(' '.join(command))) LOGGER.debug("stdout %s", stdout) LOGGER.debug("stderr %s", stderr) @contextlib.contextmanager def stash(): execute(['git', 'stash', '-u', '--keep-index']) try: yield finally: execute(['git', 'reset', '--hard']) execute(['git', 'stash', 'pop', '--quiet', '--index'])
Put un-stash in finally block
Put un-stash in finally block Without it a failure will lead to us not putting the user back in the state they expect to be in
Python
mit
EliRibble/mothermayi
--- +++ @@ -16,6 +16,8 @@ @contextlib.contextmanager def stash(): execute(['git', 'stash', '-u', '--keep-index']) - yield - execute(['git', 'reset', '--hard']) - execute(['git', 'stash', 'pop', '--quiet', '--index']) + try: + yield + finally: + execute(['git', 'reset', '--hard']) + execute(['git', 'stash', 'pop', '--quiet', '--index'])
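Aside: the commit above moves the cleanup into a finally block so the stash is restored even when the body of the with-statement raises. The same shape, reduced to a self-contained example with hypothetical setup and teardown steps in place of the git calls:

import contextlib

state = {'stashed': False}


@contextlib.contextmanager
def stashed():
    state['stashed'] = True           # setup, like 'git stash'
    try:
        yield
    finally:
        state['stashed'] = False      # teardown always runs, like 'git stash pop'


try:
    with stashed():
        raise RuntimeError('hook failed')
except RuntimeError:
    pass

assert state['stashed'] is False      # cleanup ran even though the body raised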
9db0f0430466b9d4d70c7803f7d39ecdeb85e375
src/puzzle/problems/image/image_problem.py
src/puzzle/problems/image/image_problem.py
import numpy as np from puzzle.problems import problem class ImageProblem(problem.Problem): def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None: super(ImageProblem, self).__init__(name, data, *args, **kwargs) @staticmethod def score(data: problem.ProblemData) -> float: if not isinstance(data, np.ndarray): return 0 if data.dtype == np.uint8: return 1 return .5 def __str__(self) -> str: return '<image data>' def _solve(self) -> dict: return {}
import numpy as np from data.image import image from puzzle.constraints.image import prepare_image_constraints from puzzle.problems import problem from puzzle.steps.image import prepare_image class ImageProblem(problem.Problem): _source_image: image.Image _prepare_image: prepare_image.PrepareImage def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None: super(ImageProblem, self).__init__(name, data, *args, **kwargs) self._source_image = image.Image(data) self._prepare_image = prepare_image.PrepareImage( prepare_image_constraints.PrepareImageConstraints(), self._source_image) self._solutions_generator.depends_on(self._prepare_image) @staticmethod def score(data: problem.ProblemData) -> float: if not isinstance(data, np.ndarray): return 0 if data.dtype == np.uint8: return 1 return .5 def __str__(self) -> str: return '<image data>' def _solve(self) -> dict: return {}
Update ImageProblem to use PrepareImage step.
Update ImageProblem to use PrepareImage step.
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
--- +++ @@ -1,11 +1,21 @@ import numpy as np +from data.image import image +from puzzle.constraints.image import prepare_image_constraints from puzzle.problems import problem +from puzzle.steps.image import prepare_image class ImageProblem(problem.Problem): + _source_image: image.Image + _prepare_image: prepare_image.PrepareImage + def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None: super(ImageProblem, self).__init__(name, data, *args, **kwargs) + self._source_image = image.Image(data) + self._prepare_image = prepare_image.PrepareImage( + prepare_image_constraints.PrepareImageConstraints(), self._source_image) + self._solutions_generator.depends_on(self._prepare_image) @staticmethod def score(data: problem.ProblemData) -> float:
a5dee47e46d3bdc70a4fecf23e35d7e56c4c3177
geotrek/common/management/commands/migrate.py
geotrek/common/management/commands/migrate.py
from django.apps import apps from django.conf import settings from django.contrib.gis.gdal import SpatialReference from django.core.exceptions import ImproperlyConfigured from django.core.management import call_command from django.core.management.commands.migrate import Command as BaseCommand from geotrek.common.utils.postgresql import move_models_to_schemas, load_sql_files, set_search_path def check_srid_has_meter_unit(): if not hasattr(check_srid_has_meter_unit, '_checked'): if SpatialReference(settings.SRID).units[1] != 'metre': err_msg = 'Unit of SRID EPSG:%s is not meter.' % settings.SRID raise ImproperlyConfigured(err_msg) check_srid_has_meter_unit._checked = True class Command(BaseCommand): def handle(self, *args, **options): check_srid_has_meter_unit() set_search_path() for app in apps.get_app_configs(): move_models_to_schemas(app) load_sql_files(app, 'pre') super().handle(*args, **options) call_command('sync_translation_fields', noinput=True) call_command('update_translation_fields') for app in apps.get_app_configs(): move_models_to_schemas(app) load_sql_files(app, 'post')
from django.apps import apps from django.conf import settings from django.contrib.gis.gdal import SpatialReference from django.core.exceptions import ImproperlyConfigured from django.core.management import call_command from django.core.management.commands.migrate import Command as BaseCommand from geotrek.common.utils.postgresql import move_models_to_schemas, load_sql_files, set_search_path def check_srid_has_meter_unit(): if not hasattr(check_srid_has_meter_unit, '_checked'): if SpatialReference(settings.SRID).units[1] != 'metre': err_msg = 'Unit of SRID EPSG:%s is not meter.' % settings.SRID raise ImproperlyConfigured(err_msg) check_srid_has_meter_unit._checked = True class Command(BaseCommand): def handle(self, *args, **options): check_srid_has_meter_unit() set_search_path() for app in apps.get_app_configs(): move_models_to_schemas(app) load_sql_files(app, 'pre') super().handle(*args, **options) call_command('sync_translation_fields', '--noinput') call_command('update_translation_fields') for app in apps.get_app_configs(): move_models_to_schemas(app) load_sql_files(app, 'post')
Fix no input call command
Fix no input call command
Python
bsd-2-clause
GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin
--- +++ @@ -24,7 +24,7 @@ move_models_to_schemas(app) load_sql_files(app, 'pre') super().handle(*args, **options) - call_command('sync_translation_fields', noinput=True) + call_command('sync_translation_fields', '--noinput') call_command('update_translation_fields') for app in apps.get_app_configs(): move_models_to_schemas(app)
20c77152d1b81fd3ab42d9e563bb7170ef96906c
tools/misc/python/test-phenodata-output.py
tools/misc/python/test-phenodata-output.py
# TOOL test-phenodata-output.py: "Test phenodata output in Python" () # OUTPUT phenodata.tsv # OUTPUT output.tsv with open('output.tsv', 'w') as f: f.write('test output\n') f.write('test output\n') with open('phenodata.tsv', 'w') as f: f.write('sample original_name chiptype group\n') f.write('microarray001.cel cancerGSM11814.cel hgu133ahsentrezg.db 1\n') f.write('microarray002.cel cancerGSM11830.cel hgu133ahsentrezg.db 2\n')
# TOOL test-phenodata-output.py: "Test phenodata output in Python" () # INPUT input{...}.tsv TYPE GENERIC # OUTPUT phenodata.tsv # OUTPUT output.tsv with open('output.tsv', 'w') as f: f.write('identifier chip.sample1\n') f.write('test output\n') with open('phenodata.tsv', 'w') as f: f.write('dataset column sample chiptype experiment group library_size\n') f.write('ngs-data-table.tsv chip.sample000.tsv sample000.tsv not applicable rna_seq \n') f.write('ngs-data-table.tsv chip.sample001.tsv sample001.tsv not applicable rna_seq \n')
Make output similar to Define NGS experiment tool
Make output similar to Define NGS experiment tool
Python
mit
chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools
--- +++ @@ -1,12 +1,14 @@ -# TOOL test-phenodata-output.py: "Test phenodata output in Python" () +# TOOL test-phenodata-output.py: "Test phenodata output in Python" () +# INPUT input{...}.tsv TYPE GENERIC # OUTPUT phenodata.tsv # OUTPUT output.tsv with open('output.tsv', 'w') as f: - f.write('test output\n') + f.write('identifier chip.sample1\n') f.write('test output\n') with open('phenodata.tsv', 'w') as f: - f.write('sample original_name chiptype group\n') - f.write('microarray001.cel cancerGSM11814.cel hgu133ahsentrezg.db 1\n') - f.write('microarray002.cel cancerGSM11830.cel hgu133ahsentrezg.db 2\n') + f.write('dataset column sample chiptype experiment group library_size\n') + f.write('ngs-data-table.tsv chip.sample000.tsv sample000.tsv not applicable rna_seq \n') + f.write('ngs-data-table.tsv chip.sample001.tsv sample001.tsv not applicable rna_seq \n') +
f96d2ceef6b15e397e82e310a3f369f61879a6d0
ptpython/validator.py
ptpython/validator.py
from __future__ import unicode_literals from prompt_toolkit.validation import Validator, ValidationError __all__ = ( 'PythonValidator', ) class PythonValidator(Validator): """ Validation of Python input. :param get_compiler_flags: Callable that returns the currently active compiler flags. """ def __init__(self, get_compiler_flags): self.get_compiler_flags = get_compiler_flags def validate(self, document): """ Check input for Python syntax errors. """ try: compile(document.text, '<input>', 'exec', flags=self.get_compiler_flags(), dont_inherit=True) except SyntaxError as e: # Note, the 'or 1' for offset is required because Python 2.7 # gives `None` as offset in case of '4=4' as input. (Looks like # fixed in Python 3.) index = document.translate_row_col_to_index(e.lineno - 1, (e.offset or 1) - 1) raise ValidationError(index, 'Syntax Error') except TypeError as e: # e.g. "compile() expected string without null bytes" raise ValidationError(0, str(e))
from __future__ import unicode_literals from prompt_toolkit.validation import Validator, ValidationError __all__ = ( 'PythonValidator', ) class PythonValidator(Validator): """ Validation of Python input. :param get_compiler_flags: Callable that returns the currently active compiler flags. """ def __init__(self, get_compiler_flags=None): self.get_compiler_flags = get_compiler_flags def validate(self, document): """ Check input for Python syntax errors. """ try: if self.get_compiler_flags: flags = self.get_compiler_flags() else: flags = 0 compile(document.text, '<input>', 'exec', flags=flags, dont_inherit=True) except SyntaxError as e: # Note, the 'or 1' for offset is required because Python 2.7 # gives `None` as offset in case of '4=4' as input. (Looks like # fixed in Python 3.) index = document.translate_row_col_to_index(e.lineno - 1, (e.offset or 1) - 1) raise ValidationError(index, 'Syntax Error') except TypeError as e: # e.g. "compile() expected string without null bytes" raise ValidationError(0, str(e))
Make get_compiler_flags optional for PythonValidator. (Fixes ptipython.)
Make get_compiler_flags optional for PythonValidator. (Fixes ptipython.)
Python
bsd-3-clause
jonathanslenders/ptpython
--- +++ @@ -14,7 +14,7 @@ :param get_compiler_flags: Callable that returns the currently active compiler flags. """ - def __init__(self, get_compiler_flags): + def __init__(self, get_compiler_flags=None): self.get_compiler_flags = get_compiler_flags def validate(self, document): @@ -22,8 +22,12 @@ Check input for Python syntax errors. """ try: - compile(document.text, '<input>', 'exec', - flags=self.get_compiler_flags(), dont_inherit=True) + if self.get_compiler_flags: + flags = self.get_compiler_flags() + else: + flags = 0 + + compile(document.text, '<input>', 'exec', flags=flags, dont_inherit=True) except SyntaxError as e: # Note, the 'or 1' for offset is required because Python 2.7 # gives `None` as offset in case of '4=4' as input. (Looks like
91a9da3bb1dda73add2a3040d35c9c58f7b5b4a5
alg_lonely_integer.py
alg_lonely_integer.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function def lonely_integer(): pass def main(): pass if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function def lonely_integer_naive(a_list): """Lonely integer by naive dictionary. Time complexity: O(n). Space complexity: O(n). """ integer_count_d = {} for x in a_list: if x in integer_count_d: integer_count_d[x] += 1 else: integer_count_d[x] = 1 for integer, count in integer_count_d.items(): if count == 1: return integer def lonely_integer(a_list): """Lonely integer by bit operation. Time complexity: O(n). Space complexity: O(1). """ integer = 0 for x in a_list: integer ^= x return integer def main(): import time a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7] start_time = time.time() print('Find lonely integer by naive dictionary: {}' .format(lonely_integer_naive(a_list))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('Find lonely integer by bit operation: {}' .format(lonely_integer(a_list))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
Complete lonely int by naive dict & bit op
Complete lonely int by naive dict & bit op
Python
bsd-2-clause
bowen0701/algorithms_data_structures
--- +++ @@ -2,13 +2,53 @@ from __future__ import division from __future__ import print_function -def lonely_integer(): - pass + +def lonely_integer_naive(a_list): + """Lonely integer by naive dictionary. + + Time complexity: O(n). + Space complexity: O(n). + """ + integer_count_d = {} + + for x in a_list: + if x in integer_count_d: + integer_count_d[x] += 1 + else: + integer_count_d[x] = 1 + + for integer, count in integer_count_d.items(): + if count == 1: + return integer + + +def lonely_integer(a_list): + """Lonely integer by bit operation. + + Time complexity: O(n). + Space complexity: O(1). + """ + integer = 0 + for x in a_list: + integer ^= x + return integer def main(): - pass + import time + + a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7] + + start_time = time.time() + print('Find lonely integer by naive dictionary: {}' + .format(lonely_integer_naive(a_list))) + print('Time: {}'.format(time.time() - start_time)) + + start_time = time.time() + print('Find lonely integer by bit operation: {}' + .format(lonely_integer(a_list))) + print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': - main() + main()
5455a7796ece5a8987ef99f70d469ed3770f6a76
server/src/weblab/db/gateway.py
server/src/weblab/db/gateway.py
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2005 onwards University of Deusto # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # # This software consists of contributions made by many individuals, # listed below: # # Author: Pablo Orduña <pablo@ordunya.com> # import weblab.configuration_doc as configuration_doc import weblab.db.exc as DbErrors import voodoo.configuration as CfgErrors class AbstractDatabaseGateway(object): def __init__(self, cfg_manager): try: self.host = cfg_manager.get_doc_value(configuration_doc.DB_HOST) self.database_name = cfg_manager.get_doc_value(configuration_doc.DB_DATABASE) self.engine_name = cfg_manager.get_doc_value(configuration_doc.DB_ENGINE) except CfgErrors.KeyNotFoundError as knfe: raise DbErrors.DbMisconfiguredError( "Configuration manager didn't provide values for at least one parameter: %s" % knfe, knfe )
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2005 onwards University of Deusto # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # # This software consists of contributions made by many individuals, # listed below: # # Author: Pablo Orduña <pablo@ordunya.com> # import weblab.configuration_doc as configuration_doc import weblab.db.exc as DbErrors import voodoo.configuration as CfgErrors class AbstractDatabaseGateway(object): def __init__(self, cfg_manager): self.cfg_manager = cfg_manager try: self.host = cfg_manager.get_doc_value(configuration_doc.DB_HOST) self.database_name = cfg_manager.get_doc_value(configuration_doc.DB_DATABASE) self.engine_name = cfg_manager.get_doc_value(configuration_doc.DB_ENGINE) except CfgErrors.KeyNotFoundError as knfe: raise DbErrors.DbMisconfiguredError( "Configuration manager didn't provide values for at least one parameter: %s" % knfe, knfe )
Fix cfg_manager not existing in AbstractDatabaseGateway
Fix cfg_manager not existing in AbstractDatabaseGateway
Python
bsd-2-clause
morelab/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto
--- +++ @@ -19,7 +19,7 @@ class AbstractDatabaseGateway(object): def __init__(self, cfg_manager): - + self.cfg_manager = cfg_manager try: self.host = cfg_manager.get_doc_value(configuration_doc.DB_HOST) self.database_name = cfg_manager.get_doc_value(configuration_doc.DB_DATABASE)
7e97a1d060dc903f1e6a0c3f9f777edebabb99f7
indra/tests/test_rlimsp.py
indra/tests/test_rlimsp.py
from indra.sources import rlimsp def test_simple_usage(): rp = rlimsp.process_from_webservice('PMC3717945') stmts = rp.statements assert len(stmts) == 6, len(stmts) for s in stmts: assert len(s.evidence) == 1, "Wrong amount of evidence." ev = s.evidence[0] assert ev.annotations, "Missing annotations." assert 'agents' in ev.annotations.keys() assert 'trigger' in ev.annotations.keys() def test_ungrounded_usage(): rp = rlimsp.process_from_webservice('PMC3717945', with_grounding=False) assert len(rp.statements) == 33, len(rp.statements) def test_grounded_endpoint_with_pmids(): pmid_list = ['16403219', '22258404', '16961925', '22096607'] for pmid in pmid_list: rp = rlimsp.process_from_webservice(pmid, id_type='pmid', with_grounding=False) assert len(rp.statements) > 10, len(rp.statements)
from indra.sources import rlimsp def test_simple_usage(): rp = rlimsp.process_from_webservice('PMC3717945') stmts = rp.statements assert len(stmts) == 6, len(stmts) for s in stmts: assert len(s.evidence) == 1, "Wrong amount of evidence." ev = s.evidence[0] assert ev.annotations, "Missing annotations." assert 'agents' in ev.annotations.keys() assert 'trigger' in ev.annotations.keys() def test_ungrounded_usage(): rp = rlimsp.process_from_webservice('PMC3717945', with_grounding=False) assert len(rp.statements) == 33, len(rp.statements) def test_grounded_endpoint_with_pmids(): pmid_list = ['16403219', '22258404', '16961925', '22096607'] stmts = [] for pmid in pmid_list: rp = rlimsp.process_from_webservice(pmid, id_type='pmid', with_grounding=False) assert len(rp.statements) > 10, len(rp.statements) stmts.extend(rp.statements) assert len(stmts) == 397, len(stmts) return
Check final count of statements.
Check final count of statements.
Python
bsd-2-clause
pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/indra
--- +++ @@ -20,7 +20,11 @@ def test_grounded_endpoint_with_pmids(): pmid_list = ['16403219', '22258404', '16961925', '22096607'] + stmts = [] for pmid in pmid_list: rp = rlimsp.process_from_webservice(pmid, id_type='pmid', with_grounding=False) assert len(rp.statements) > 10, len(rp.statements) + stmts.extend(rp.statements) + assert len(stmts) == 397, len(stmts) + return
127a6d65b46e94f7698ae9739c1770903068351a
bempy/django/views.py
bempy/django/views.py
# -*- coding: utf-8 -*- import os.path from django.http import HttpResponse from django.conf import settings from functools import wraps from bempy import ImmediateResponse def returns_blocks(func): @wraps(func) def wrapper(request): page = func(request) try: if isinstance(page, HttpResponse): return page else: response = HttpResponse(page('render', request=request, js_filename='bempy.js', css_filename='bempy.css')) return response except ImmediateResponse as e: return e.response wrapper.block = func return wrapper
# -*- coding: utf-8 -*- import os.path from django.http import HttpResponse from django.conf import settings from functools import wraps from bempy import ImmediateResponse def returns_blocks(func): @wraps(func) def wrapper(request, *args, **kwargs): page = func(request, *args, **kwargs) try: if isinstance(page, HttpResponse): return page else: response = HttpResponse(page('render', request=request, js_filename='bempy.js', css_filename='bempy.css')) return response except ImmediateResponse as e: return e.response wrapper.block = func return wrapper
Make wrapper returned by `returns_blocks` decorator, understand additional `args` and `kwargs` arguments.
Make wrapper returned by `returns_blocks` decorator, understand additional `args` and `kwargs` arguments.
Python
bsd-3-clause
svetlyak40wt/bempy,svetlyak40wt/bempy,svetlyak40wt/bempy
--- +++ @@ -9,9 +9,9 @@ def returns_blocks(func): @wraps(func) - def wrapper(request): - page = func(request) - + def wrapper(request, *args, **kwargs): + page = func(request, *args, **kwargs) + try: if isinstance(page, HttpResponse): return page
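Aside: the fix above is the usual way to keep a decorator transparent to the wrapped view's signature, the wrapper accepts *args and **kwargs and forwards them unchanged. A stripped-down sketch with hypothetical names and no Django involved:

from functools import wraps


def logged(func):
    @wraps(func)
    def wrapper(request, *args, **kwargs):
        print('calling', func.__name__, args, kwargs)
        return func(request, *args, **kwargs)   # forward everything untouched
    return wrapper


@logged
def detail_view(request, pk, template='detail.html'):
    return (request, pk, template)


assert detail_view('req', 42, template='other.html') == ('req', 42, 'other.html')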
6ed4aa25d5e7113df75a622f6e86e83e9ace1390
djangoautoconf/cmd_handler_base/database_connection_maintainer.py
djangoautoconf/cmd_handler_base/database_connection_maintainer.py
import thread import time from django.db import close_old_connections class DatabaseConnectionMaintainer(object): def __init__(self): self.clients = set() # self.device_to_protocol = {} self.is_recent_db_change_occurred = False self.delay_and_execute(3600, self.close_db_connection_if_needed) def close_db_connection_if_needed(self): if not self.is_recent_db_change_occurred: close_old_connections() print "db connection closed" self.is_recent_db_change_occurred = False self.delay_and_execute(3600, self.close_db_connection_if_needed) def refresh_timeout(self): self.is_recent_db_change_occurred = True def delay_and_execute(self, timeout, callback): thread.start_new_thread(self.periodical_task, (timeout, callback)) # noinspection PyMethodMayBeStatic def periodical_task(self, timeout, callback): time.sleep(timeout) callback()
import thread import time from django.db import close_old_connections class DatabaseConnectionMaintainer(object): DB_TIMEOUT_SECONDS = 5*60 def __init__(self): self.clients = set() # self.device_to_protocol = {} self.is_recent_db_change_occurred = False self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed) def close_db_connection_if_needed(self): if not self.is_recent_db_change_occurred: close_old_connections() print "db connection closed" self.is_recent_db_change_occurred = False self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed) def refresh_timeout(self): self.is_recent_db_change_occurred = True def delay_and_execute(self, timeout, callback): thread.start_new_thread(self.periodical_task, (timeout, callback)) # noinspection PyMethodMayBeStatic def periodical_task(self, timeout, callback): time.sleep(timeout) callback()
Update database close timeout value.
Update database close timeout value.
Python
bsd-3-clause
weijia/djangoautoconf,weijia/djangoautoconf
--- +++ @@ -5,18 +5,20 @@ class DatabaseConnectionMaintainer(object): + DB_TIMEOUT_SECONDS = 5*60 + def __init__(self): self.clients = set() # self.device_to_protocol = {} self.is_recent_db_change_occurred = False - self.delay_and_execute(3600, self.close_db_connection_if_needed) + self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed) def close_db_connection_if_needed(self): if not self.is_recent_db_change_occurred: close_old_connections() print "db connection closed" self.is_recent_db_change_occurred = False - self.delay_and_execute(3600, self.close_db_connection_if_needed) + self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed) def refresh_timeout(self): self.is_recent_db_change_occurred = True
2c502a77ad18d34470e2be89ed1c7a38e6f3799d
tests/test_drogher.py
tests/test_drogher.py
import pytest import drogher from drogher.exceptions import InvalidBarcode class TestDrogher: def test_barcode(self): shipper = drogher.barcode('1Z999AA10123456784') assert shipper.shipper == 'UPS' def test_invalid_barcode(self): with pytest.raises(InvalidBarcode): drogher.barcode('1234')
import pytest import drogher from drogher.exceptions import InvalidBarcode class TestDrogher: def test_dhl_barcode(self): shipper = drogher.barcode('1656740256') assert shipper.shipper == 'DHL' def test_fedex_express_barcode(self): shipper = drogher.barcode('9632001960000000000400152152152158') assert shipper.shipper == 'FedEx' def test_fedex_ground_barcode(self): shipper = drogher.barcode('9611019012345612345671') assert shipper.shipper == 'FedEx' def test_ups_barcode(self): shipper = drogher.barcode('1Z999AA10123456784') assert shipper.shipper == 'UPS' def test_usps_barcode(self): shipper = drogher.barcode('420221539101026837331000039521') assert shipper.shipper == 'USPS' def test_invalid_barcode(self): with pytest.raises(InvalidBarcode): drogher.barcode('1234')
Test barcode function with all shippers
Test barcode function with all shippers
Python
bsd-3-clause
jbittel/drogher
--- +++ @@ -5,9 +5,25 @@ class TestDrogher: - def test_barcode(self): + def test_dhl_barcode(self): + shipper = drogher.barcode('1656740256') + assert shipper.shipper == 'DHL' + + def test_fedex_express_barcode(self): + shipper = drogher.barcode('9632001960000000000400152152152158') + assert shipper.shipper == 'FedEx' + + def test_fedex_ground_barcode(self): + shipper = drogher.barcode('9611019012345612345671') + assert shipper.shipper == 'FedEx' + + def test_ups_barcode(self): shipper = drogher.barcode('1Z999AA10123456784') assert shipper.shipper == 'UPS' + + def test_usps_barcode(self): + shipper = drogher.barcode('420221539101026837331000039521') + assert shipper.shipper == 'USPS' def test_invalid_barcode(self): with pytest.raises(InvalidBarcode):
cca4f761ed39bd970c76c3b4ca581511bfe0130d
web/app/djrq/templates/letterscountsbar.py
web/app/djrq/templates/letterscountsbar.py
# encoding: cinje : from cinje.std.html import link, div, span : from urllib.parse import urlencode, quote_plus : def letterscountsbar ctx, letterscountslist : try : selected_letter = ctx.selected_letter : except AttributeError : selected_letter = None : end <div class="col-sm-1 list-group"> : for row in letterscountslist : if row.letter == '' : print("Skip Letter: |{}|".format(row.letter), dir(row.letter)) : continue :end : l = quote_plus(row.letter) <a href="/${ctx.resource.__resource__}/?letter=${l}" tip='${row.count}' class='list-group-item #{"active" if selected_letter == row.letter else ""}'> ${row.letter} <span class='badge'>${row.count}</span> </a> : end </div> : end
# encoding: cinje : from cinje.std.html import link, div, span : from urllib.parse import urlencode, quote_plus : def letterscountsbar ctx, letterscountslist : try : selected_letter = ctx.selected_letter : except AttributeError : selected_letter = None : end <div class="col-sm-1 list-group"> : for row in letterscountslist : if row.letter == '' : print("Skip Letter: |{}|".format(row.letter), dir(row.letter)) : continue :end : l = quote_plus(row.letter) <a href="/${ctx.resource.__resource__}/?letter=${l}" tip='${row.count}' class='list-group-item #{"active" if selected_letter == row.letter else ""}'> : if row.letter == ' ' &nbsp; : else ${row.letter} : end </span> <span class='badge'>${row.count}</span> </a> : end </div> : end
Fix problem on letter list display when the letter is a <space>
Fix problem on letter list display when the letter is a <space> Work around for now for issue #10. And will need to be permanant, since the real fix to the database import would make removing the space optional.
Python
mit
bmillham/djrq2,bmillham/djrq2,bmillham/djrq2
--- +++ @@ -20,7 +20,13 @@ <a href="/${ctx.resource.__resource__}/?letter=${l}" tip='${row.count}' class='list-group-item #{"active" if selected_letter == row.letter else ""}'> - ${row.letter} <span class='badge'>${row.count}</span> + : if row.letter == ' ' + &nbsp; + : else + ${row.letter} + : end + </span> + <span class='badge'>${row.count}</span> </a> : end </div>
d80c726fcf36a2dc439ce12717f9e88161501358
gather/node/models.py
gather/node/models.py
# -*- coding:utf-8 -*- from flask.ext.sqlalchemy import models_committed from gather.extensions import db, cache class Node(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), nullable=False, unique=True, index=True) slug = db.Column(db.String(100), nullable=False, unique=True, index=True) description = db.Column(db.String(500), nullable=True, default="") icon = db.Column(db.String(100), nullable=True, default="") def __str__(self): return self.name def __repr__(self): return '<Node: %s>' % self.name @classmethod def query_all(cls): return cls.query.all() def to_dict(self): return dict( id=self.id, name=self.name, slug=self.slug, description=self.description, icon=self.icon ) def save(self): db.session.add(self) db.session.commit() return self
# -*- coding:utf-8 -*- from flask.ext.sqlalchemy import models_committed from gather.extensions import db, cache class Node(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), nullable=False, unique=True, index=True) slug = db.Column(db.String(100), nullable=False, unique=True, index=True) description = db.Column(db.String(500), nullable=True, default="") icon = db.Column(db.String(100), nullable=True, default="") def __str__(self): return self.name def __repr__(self): return '<Node: %s>' % self.name @classmethod def query_all(cls): return cls.query.order_by(Node.name.asc()).all() def to_dict(self): return dict( id=self.id, name=self.name, slug=self.slug, description=self.description, icon=self.icon ) def save(self): db.session.add(self) db.session.commit() return self
Add order in nodes in topic creation form
Add order in nodes in topic creation form
Python
mit
whtsky/Gather,whtsky/Gather
--- +++ @@ -19,7 +19,7 @@ @classmethod def query_all(cls): - return cls.query.all() + return cls.query.order_by(Node.name.asc()).all() def to_dict(self): return dict(
5d70735ab4254509e1efed73be4eecf77629063e
github_hook_server.py
github_hook_server.py
#! /usr/bin/env python """ Our github hook receiving server. """ import os from flask import Flask from flask_hookserver import Hooks from github import validate_review_request from slack import notify_reviewer app = Flask(__name__) app.config['GITHUB_WEBHOOKS_KEY'] = os.environ.get('GITHUB_WEBHOOKS_KEY') app.config['VALIDATE_IP'] = os.environ.get('GIT_HOOK_VALIDATE_IP', True) app.config['VALIDATE_SIGNATURE'] = os.environ.get('GIT_HOOK_VALIDATE_SIGNATURE', True) hooks = Hooks(app, url='/hooks') @hooks.hook('ping') def ping(data, guid): return 'pong' @hooks.hook('pull_request') def pull_request(data, guid): if validate_review_request(data): notify_reviewer(data) result = 'Reviewer Notified' else: result = 'Action ({}) ignored'.format(data.get('action')) return result app.run(host='0.0.0.0')
#! /usr/bin/env python """ Our github hook receiving server. """ import os from flask import Flask from flask_hookserver import Hooks from github import validate_review_request from slack import notify_reviewer app = Flask(__name__) app.config['GITHUB_WEBHOOKS_KEY'] = os.environ.get('GITHUB_WEBHOOKS_KEY') if os.environ.get('GIT_HOOK_VALIDATE_IP', 'True').lower() in ['false', '0']: app.config['VALIDATE_IP'] = False if os.environ.get('GIT_HOOK_VALIDATE_SIGNATURE', 'True').lower() in ['false', '0']: app.config['VALIDATE_SIGNATURE'] = False hooks = Hooks(app, url='/hooks') @hooks.hook('ping') def ping(data, guid): return 'pong' @hooks.hook('pull_request') def pull_request(data, guid): if validate_review_request(data): notify_reviewer(data) result = 'Reviewer Notified' else: result = 'Action ({}) ignored'.format(data.get('action')) return result app.run(host='0.0.0.0')
Fix env vars always evaluating as true
Fix env vars always evaluating as true Env vars come through as strings. Which are true. The default on our values is also true. Meaning it would never change. So we have to do special string checking to get it to actually change. Yay.
Python
mit
DobaTech/github-review-slack-notifier
--- +++ @@ -11,8 +11,10 @@ app = Flask(__name__) app.config['GITHUB_WEBHOOKS_KEY'] = os.environ.get('GITHUB_WEBHOOKS_KEY') -app.config['VALIDATE_IP'] = os.environ.get('GIT_HOOK_VALIDATE_IP', True) -app.config['VALIDATE_SIGNATURE'] = os.environ.get('GIT_HOOK_VALIDATE_SIGNATURE', True) +if os.environ.get('GIT_HOOK_VALIDATE_IP', 'True').lower() in ['false', '0']: + app.config['VALIDATE_IP'] = False +if os.environ.get('GIT_HOOK_VALIDATE_SIGNATURE', 'True').lower() in ['false', '0']: + app.config['VALIDATE_SIGNATURE'] = False hooks = Hooks(app, url='/hooks')
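Aside: the pitfall described in the commit message above is that every non-empty environment variable is a truthy string, including 'False'. A small standalone helper showing one way to parse boolean-ish env vars explicitly; the names are illustrative, not from the project:

import os


def env_flag(name, default=True):
    """Read an environment variable as a boolean.

    os.environ values are always strings, and any non-empty string is
    truthy, so bool() would treat 'False' and '0' as True. Compare the
    text explicitly instead.
    """
    raw = os.environ.get(name)
    if raw is None:
        return default
    return raw.strip().lower() not in ('false', '0', 'no', 'off', '')


os.environ['GIT_HOOK_VALIDATE_IP'] = 'False'
assert env_flag('GIT_HOOK_VALIDATE_IP') is False
assert env_flag('SOME_UNSET_FLAG', default=True) is True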
93081d423a73a6b16e5adfb94247ffec23ef667c
api/base/authentication/backends.py
api/base/authentication/backends.py
from osf.models.user import OSFUser from framework.auth.core import get_user from django.contrib.auth.backends import ModelBackend # https://docs.djangoproject.com/en/1.8/topics/auth/customizing/ class ODMBackend(ModelBackend): def authenticate(self, username=None, password=None): return get_user(email=username, password=password) or None def get_user(self, user_id): try: user = OSFUser.objects.get(id=user_id) except OSFUser.DoesNotExist: user = OSFUser.load(user_id) return user
from osf.models.user import OSFUser from framework.auth.core import get_user from django.contrib.auth.backends import ModelBackend # https://docs.djangoproject.com/en/3.2/topics/auth/customizing/ class ODMBackend(ModelBackend): def authenticate(self, request, username=None, password=None, **kwargs): return get_user(email=username, password=password) or None def get_user(self, user_id): try: user = OSFUser.objects.get(id=user_id) except OSFUser.DoesNotExist: user = OSFUser.load(user_id) return user
Fix admin login failure for django upgrade
Fix admin login failure for django upgrade
Python
apache-2.0
Johnetordoff/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io
--- +++ @@ -2,10 +2,11 @@ from framework.auth.core import get_user from django.contrib.auth.backends import ModelBackend -# https://docs.djangoproject.com/en/1.8/topics/auth/customizing/ + +# https://docs.djangoproject.com/en/3.2/topics/auth/customizing/ class ODMBackend(ModelBackend): - def authenticate(self, username=None, password=None): + def authenticate(self, request, username=None, password=None, **kwargs): return get_user(email=username, password=password) or None def get_user(self, user_id):
47ea8ca6034bfbb16e9049081b5158d9b01833fc
pytest_ansible/__init__.py
pytest_ansible/__init__.py
__version__ = '1.4.0' __author__ = "James Laska" __author_email__ = "<jlaska@ansible.com>"
__version__ = '2.0.0' __author__ = "James Laska" __author_email__ = "<jlaska@ansible.com>"
Prepare for next major release
Prepare for next major release
Python
mit
jlaska/pytest-ansible
--- +++ @@ -1,3 +1,3 @@ -__version__ = '1.4.0' +__version__ = '2.0.0' __author__ = "James Laska" __author_email__ = "<jlaska@ansible.com>"
db4355ce0345df9dd23b937370f5f0d4cb2164e9
zc_common/remote_resource/filters.py
zc_common/remote_resource/filters.py
import re from django.db.models.fields.related import ManyToManyField from rest_framework import filters class JSONAPIFilterBackend(filters.DjangoFilterBackend): def filter_queryset(self, request, queryset, view): filter_class = self.get_filter_class(view, queryset) primary_key = queryset.model._meta.pk.name query_params = {} for param, value in request.query_params.iteritems(): match = re.search(r'^filter\[(\w+)\]$', param) if match: field_name = match.group(1) try: name, extra = field_name.split('__') except ValueError: name = field_name extra = None if name not in view.filter_fields.keys(): return queryset.none() if len(field_name) > 1 and field_name[:2] == 'id': query_params['{0}__{1}'.format(primary_key, extra)] = value if hasattr(queryset.model, field_name)\ and isinstance(getattr(queryset.model, field_name).field, ManyToManyField): value = value.split(',') query_params[field_name] = value if filter_class: return filter_class(query_params, queryset=queryset).qs return queryset
import re from distutils.util import strtobool from django.db.models import BooleanField, FieldDoesNotExist from django.db.models.fields.related import ManyToManyField from rest_framework import filters class JSONAPIFilterBackend(filters.DjangoFilterBackend): def filter_queryset(self, request, queryset, view): filter_class = self.get_filter_class(view, queryset) primary_key = queryset.model._meta.pk.name query_params = {} for param, value in request.query_params.iteritems(): match = re.search(r'^filter\[(\w+)\]$', param) if match: field_name = match.group(1) try: name, extra = field_name.split('__') except ValueError: name = field_name extra = None if name not in view.filter_fields.keys(): return queryset.none() if len(field_name) > 1 and field_name[:2] == 'id': query_params['{0}__{1}'.format(primary_key, extra)] = value if hasattr(queryset.model, field_name)\ and isinstance(getattr(queryset.model, field_name).field, ManyToManyField): value = value.split(',') # Allow 'true' or 'false' as values for boolean fields try: if isinstance(queryset.model._meta.get_field(field_name), BooleanField): value = bool(strtobool(value)) except FieldDoesNotExist: pass query_params[field_name] = value if filter_class: return filter_class(query_params, queryset=queryset).qs return queryset
Use 'true' while filtering a boolean as opposed to 'True'
Use 'true' while filtering a boolean as opposed to 'True'
Python
mit
ZeroCater/zc_common,ZeroCater/zc_common
--- +++ @@ -1,5 +1,7 @@ import re +from distutils.util import strtobool +from django.db.models import BooleanField, FieldDoesNotExist from django.db.models.fields.related import ManyToManyField from rest_framework import filters @@ -27,6 +29,14 @@ if hasattr(queryset.model, field_name)\ and isinstance(getattr(queryset.model, field_name).field, ManyToManyField): value = value.split(',') + + # Allow 'true' or 'false' as values for boolean fields + try: + if isinstance(queryset.model._meta.get_field(field_name), BooleanField): + value = bool(strtobool(value)) + except FieldDoesNotExist: + pass + query_params[field_name] = value if filter_class:
a789f05ac157effabe3426611b3ea7e6cef5fb3d
makahiki/apps/managers/team_mgr/team_mgr.py
makahiki/apps/managers/team_mgr/team_mgr.py
"""The manager for managing team.""" from apps.managers.score_mgr import score_mgr from apps.managers.team_mgr.models import Team def team_members(team): """Get the team members.""" return team.profile_set() def team_points_leader(round_name="Overall"): """Returns the team points leader (the first place) across all groups, as a Team object.""" team_id = score_mgr.team_points_leader(round_name=round_name) if team_id: return Team.objects.get(id=team_id) else: return Team.objects.all()[0] def team_points_leaders(num_results=10, round_name="Overall"): """Returns the team points leaders across all groups, as a dictionary profile__team__name and points. """ entry = score_mgr.team_points_leaders(num_results=num_results, round_name=round_name) if entry: return entry else: return Team.objects.all().extra(select={'profile__team__name': 'name', 'points': 0}).values( 'profile__team__name', 'points')[:num_results]
"""The manager for managing team.""" from apps.managers.score_mgr import score_mgr from apps.managers.team_mgr.models import Team def team_members(team): """Get the team members.""" return team.profile_set.all() def team_points_leader(round_name="Overall"): """Returns the team points leader (the first place) across all groups, as a Team object.""" team_id = score_mgr.team_points_leader(round_name=round_name) if team_id: return Team.objects.get(id=team_id) else: return Team.objects.all()[0] def team_points_leaders(num_results=10, round_name="Overall"): """Returns the team points leaders across all groups, as a dictionary profile__team__name and points. """ entry = score_mgr.team_points_leaders(num_results=num_results, round_name=round_name) if entry: return entry else: return Team.objects.all().extra(select={'profile__team__name': 'name', 'points': 0}).values( 'profile__team__name', 'points')[:num_results]
Fix for all members in the news page.
Fix for all members in the news page.
Python
mit
jtakayama/makahiki-draft,yongwen/makahiki,jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,yongwen/makahiki,jtakayama/ics691-setupbooster,yongwen/makahiki,yongwen/makahiki,jtakayama/makahiki-draft,jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,csdl/makahiki,csdl/makahiki,csdl/makahiki,justinslee/Wai-Not-Makahiki,csdl/makahiki
--- +++ @@ -5,7 +5,7 @@ def team_members(team): """Get the team members.""" - return team.profile_set() + return team.profile_set.all() def team_points_leader(round_name="Overall"):
583ea6c1a234ab9d484b1e80e7f567d9a5d2fb71
shopify/resources/image.py
shopify/resources/image.py
from ..base import ShopifyResource import base64 import re class Image(ShopifyResource): _prefix_source = "/admin/products/$product_id/" def __getattr__(self, name): if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]: return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src) else: return super(Image, self).__getattr__(name) def attach_image(self, data, filename=None): self.attributes["attachment"] = base64.b64encode(data) if filename: self.attributes["filename"] = filename
from ..base import ShopifyResource from ..resources import Metafield from six.moves import urllib import base64 import re class Image(ShopifyResource): _prefix_source = "/admin/products/$product_id/" def __getattr__(self, name): if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]: return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src) else: return super(Image, self).__getattr__(name) def attach_image(self, data, filename=None): self.attributes["attachment"] = base64.b64encode(data) if filename: self.attributes["filename"] = filename def metafields(self): if self.is_new(): return [] query_params = { 'metafield[owner_id]': self.id, 'metafield[owner_resource]': 'product_image' } return Metafield.find(from_ = '/admin/metafields.json?%s' % urllib.parse.urlencode(query_params))
Add `metafields()` method to `Image` resource.
Add `metafields()` method to `Image` resource.
Python
mit
asiviero/shopify_python_api,SmileyJames/shopify_python_api,Shopify/shopify_python_api,metric-collective/shopify_python_api,gavinballard/shopify_python_api,ifnull/shopify_python_api
--- +++ @@ -1,4 +1,6 @@ from ..base import ShopifyResource +from ..resources import Metafield +from six.moves import urllib import base64 import re @@ -16,3 +18,9 @@ self.attributes["attachment"] = base64.b64encode(data) if filename: self.attributes["filename"] = filename + + def metafields(self): + if self.is_new(): + return [] + query_params = { 'metafield[owner_id]': self.id, 'metafield[owner_resource]': 'product_image' } + return Metafield.find(from_ = '/admin/metafields.json?%s' % urllib.parse.urlencode(query_params))
b771fd2a463266e1c80b1b4cccfe78d822c391a2
byceps/blueprints/admin/shop/order/forms.py
byceps/blueprints/admin/shop/order/forms.py
""" byceps.blueprints.admin.shop.order.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from flask_babel import lazy_gettext from wtforms import BooleanField, RadioField, StringField, TextAreaField from wtforms.validators import InputRequired, Length from .....services.shop.order import service as order_service from .....services.shop.order.transfer.order import PAYMENT_METHODS from .....util.l10n import LocalizedForm class AddNoteForm(LocalizedForm): text = TextAreaField(lazy_gettext('Text'), validators=[InputRequired()]) class CancelForm(LocalizedForm): reason = TextAreaField( lazy_gettext('Reason'), validators=[InputRequired(), Length(max=1000)], ) send_email = BooleanField( lazy_gettext('Inform orderer via email of cancelation.') ) def _get_payment_method_choices(): return [ (pm, order_service.find_payment_method_label(pm) or pm) for pm in PAYMENT_METHODS ] class MarkAsPaidForm(LocalizedForm): payment_method = RadioField( lazy_gettext('Payment type'), choices=_get_payment_method_choices(), default='bank_transfer', validators=[InputRequired()], ) class OrderNumberSequenceCreateForm(LocalizedForm): prefix = StringField( lazy_gettext('Static prefix'), validators=[InputRequired()] )
""" byceps.blueprints.admin.shop.order.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from flask_babel import lazy_gettext from wtforms import BooleanField, RadioField, StringField, TextAreaField from wtforms.validators import InputRequired, Length from .....services.shop.order import service as order_service from .....services.shop.order.transfer.order import PAYMENT_METHODS from .....util.l10n import LocalizedForm class AddNoteForm(LocalizedForm): text = TextAreaField(lazy_gettext('Text'), validators=[InputRequired()]) class CancelForm(LocalizedForm): reason = TextAreaField( lazy_gettext('Reason'), validators=[InputRequired(), Length(max=1000)], ) send_email = BooleanField( lazy_gettext('Inform orderer via email of cancelation.') ) def _get_payment_method_choices(): choices = [ (pm, order_service.find_payment_method_label(pm) or pm) for pm in PAYMENT_METHODS ] choices.sort() return choices class MarkAsPaidForm(LocalizedForm): payment_method = RadioField( lazy_gettext('Payment type'), choices=_get_payment_method_choices(), default='bank_transfer', validators=[InputRequired()], ) class OrderNumberSequenceCreateForm(LocalizedForm): prefix = StringField( lazy_gettext('Static prefix'), validators=[InputRequired()] )
Stabilize order of payment methods in mark-as-paid form
Stabilize order of payment methods in mark-as-paid form
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
--- +++ @@ -30,10 +30,12 @@ def _get_payment_method_choices(): - return [ + choices = [ (pm, order_service.find_payment_method_label(pm) or pm) for pm in PAYMENT_METHODS ] + choices.sort() + return choices class MarkAsPaidForm(LocalizedForm):
ef7ac85615d9790525a6e5364105613905cae859
tests/test_backend.py
tests/test_backend.py
from datetime import datetime import pytest import pytz from todoman.model import Todo, VtodoWritter def test_serialize_created_at(todo_factory): now = datetime.now(tz=pytz.UTC) todo = todo_factory(created_at=now) vtodo = VtodoWritter(todo).serialize() assert vtodo.get('created') is not None def test_serialize_dtstart(todo_factory): now = datetime.now(tz=pytz.UTC) todo = todo_factory(start=now) vtodo = VtodoWritter(todo).serialize() assert vtodo.get('dtstart') is not None def test_serializer_raises(todo_factory): todo = todo_factory() writter = VtodoWritter(todo) with pytest.raises(Exception): writter.serialize_field('nonexistant', 7) def test_supported_fields_are_serializeable(): supported_fields = set(Todo.ALL_SUPPORTED_FIELDS) serialized_fields = set(VtodoWritter.FIELD_MAP.keys()) assert supported_fields == serialized_fields
from datetime import datetime import pytest import pytz from dateutil.tz import tzlocal from todoman.model import Todo, VtodoWritter def test_serialize_created_at(todo_factory): now = datetime.now(tz=pytz.UTC) todo = todo_factory(created_at=now) vtodo = VtodoWritter(todo).serialize() assert vtodo.get('created') is not None def test_serialize_dtstart(todo_factory): now = datetime.now(tz=pytz.UTC) todo = todo_factory(start=now) vtodo = VtodoWritter(todo).serialize() assert vtodo.get('dtstart') is not None def test_serializer_raises(todo_factory): todo = todo_factory() writter = VtodoWritter(todo) with pytest.raises(Exception): writter.serialize_field('nonexistant', 7) def test_supported_fields_are_serializeable(): supported_fields = set(Todo.ALL_SUPPORTED_FIELDS) serialized_fields = set(VtodoWritter.FIELD_MAP.keys()) assert supported_fields == serialized_fields def test_vtodo_serialization(todo_factory): """Test VTODO serialization: one field of each type.""" description = 'A tea would be nice, thanks.' todo = todo_factory( categories=['tea', 'drinking', 'hot'], description=description, due=datetime(3000, 3, 21), priority=7, status='IN-PROCESS', summary='Some tea', ) writer = VtodoWritter(todo) vtodo = writer.serialize() assert str(vtodo.get('categories')) == 'tea,drinking,hot' assert str(vtodo.get('description')) == description assert vtodo.get('priority') == 7 assert vtodo.decoded('due') == datetime(3000, 3, 21, tzinfo=tzlocal()) assert str(vtodo.get('status')) == 'IN-PROCESS'
Test VTODO serialization for all data types
Test VTODO serialization for all data types
Python
isc
pimutils/todoman,AnubhaAgrawal/todoman,Sakshisaraswat/todoman,hobarrera/todoman
--- +++ @@ -2,6 +2,7 @@ import pytest import pytz +from dateutil.tz import tzlocal from todoman.model import Todo, VtodoWritter @@ -35,3 +36,24 @@ serialized_fields = set(VtodoWritter.FIELD_MAP.keys()) assert supported_fields == serialized_fields + + +def test_vtodo_serialization(todo_factory): + """Test VTODO serialization: one field of each type.""" + description = 'A tea would be nice, thanks.' + todo = todo_factory( + categories=['tea', 'drinking', 'hot'], + description=description, + due=datetime(3000, 3, 21), + priority=7, + status='IN-PROCESS', + summary='Some tea', + ) + writer = VtodoWritter(todo) + vtodo = writer.serialize() + + assert str(vtodo.get('categories')) == 'tea,drinking,hot' + assert str(vtodo.get('description')) == description + assert vtodo.get('priority') == 7 + assert vtodo.decoded('due') == datetime(3000, 3, 21, tzinfo=tzlocal()) + assert str(vtodo.get('status')) == 'IN-PROCESS'
216a52d52fdbfca959946434a81f4d42270bfd95
bluebottle/organizations/serializers.py
bluebottle/organizations/serializers.py
from rest_framework import serializers from bluebottle.organizations.models import Organization from bluebottle.utils.serializers import URLField class OrganizationSerializer(serializers.ModelSerializer): class Meta: model = Organization fields = ('id', 'name', 'slug', 'address_line1', 'address_line2', 'city', 'state', 'country', 'postal_code', 'phone_number', 'website', 'email') class ManageOrganizationSerializer(serializers.ModelSerializer): slug = serializers.SlugField(required=False, allow_null=True) name = serializers.CharField(required=True, allow_blank=True) website = URLField(required=False, allow_blank=True) email = serializers.EmailField(required=False, allow_blank=True) class Meta: model = Organization fields = OrganizationSerializer.Meta.fields + ('partner_organizations', 'created', 'updated')
from rest_framework import serializers from bluebottle.organizations.models import Organization from bluebottle.utils.serializers import URLField class OrganizationSerializer(serializers.ModelSerializer): class Meta: model = Organization fields = ('id', 'name', 'slug', 'address_line1', 'address_line2', 'city', 'state', 'country', 'postal_code', 'phone_number', 'website', 'email') class ManageOrganizationSerializer(serializers.ModelSerializer): slug = serializers.SlugField(required=False, allow_null=True) name = serializers.CharField(required=True) website = URLField(required=False, allow_blank=True) email = serializers.EmailField(required=False, allow_blank=True) class Meta: model = Organization fields = OrganizationSerializer.Meta.fields + ('partner_organizations', 'created', 'updated')
Make the name of an organization required
Make the name of an organization required
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle
--- +++ @@ -14,7 +14,7 @@ class ManageOrganizationSerializer(serializers.ModelSerializer): slug = serializers.SlugField(required=False, allow_null=True) - name = serializers.CharField(required=True, allow_blank=True) + name = serializers.CharField(required=True) website = URLField(required=False, allow_blank=True) email = serializers.EmailField(required=False, allow_blank=True)
bf5518f2f181879141279d9322fe75f3163d438d
byceps/services/news/transfer/models.py
byceps/services/news/transfer/models.py
""" byceps.services.news.transfer.models ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from dataclasses import dataclass from datetime import datetime from typing import List, NewType from uuid import UUID from ....typing import BrandID, UserID ChannelID = NewType('ChannelID', str) ItemID = NewType('ItemID', UUID) ItemVersionID = NewType('ItemVersionID', UUID) ImageID = NewType('ImageID', UUID) @dataclass(frozen=True) class Channel: id: ChannelID brand_id: BrandID url_prefix: str @dataclass(frozen=True) class Image: id: ImageID created_at: datetime creator_id: UserID item_id: ItemID number: int filename: str alt_text: str caption: str attribution: str @dataclass(frozen=True) class Item: id: ItemID channel: Channel slug: str published_at: datetime published: bool title: str body: str external_url: str image_url_path: str images: List[Image]
""" byceps.services.news.transfer.models ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from dataclasses import dataclass from datetime import datetime from typing import List, NewType, Optional from uuid import UUID from ....typing import BrandID, UserID ChannelID = NewType('ChannelID', str) ItemID = NewType('ItemID', UUID) ItemVersionID = NewType('ItemVersionID', UUID) ImageID = NewType('ImageID', UUID) @dataclass(frozen=True) class Channel: id: ChannelID brand_id: BrandID url_prefix: str @dataclass(frozen=True) class Image: id: ImageID created_at: datetime creator_id: UserID item_id: ItemID number: int filename: str alt_text: Optional[str] caption: Optional[str] attribution: Optional[str] @dataclass(frozen=True) class Item: id: ItemID channel: Channel slug: str published_at: Optional[datetime published: bool title: str body: str external_url: str image_url_path: Optional[str] images: List[Image]
Mark optional news image and item fields as such
Mark optional news image and item fields as such
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
--- +++ @@ -8,7 +8,7 @@   from dataclasses import dataclass from datetime import datetime -from typing import List, NewType +from typing import List, NewType, Optional from uuid import UUID  from ....typing import BrandID, UserID @@ -41,9 +41,9 @@      item_id: ItemID     number: int     filename: str -    alt_text: str -    caption: str -    attribution: str +    alt_text: Optional[str] +    caption: Optional[str] +    attribution: Optional[str]   @dataclass(frozen=True) @@ -51,10 +51,10 @@      id: ItemID     channel: Channel     slug: str -    published_at: datetime +    published_at: Optional[datetime]     published: bool     title: str     body: str     external_url: str -    image_url_path: str +    image_url_path: Optional[str]     images: List[Image]
c2a0b4c99f515a6fafae85d72be9d54b3c92c0b3
databench/__init__.py
databench/__init__.py
"""Databench module.""" __version__ = "0.3.0" # Need to make sure monkey.patch_all() is applied before any # 'import threading', but cannot raise error because building the Sphinx # documentation also suffers from this problem, but there it can be ignored. import sys if 'threading' in sys.modules: print 'WARNING: The threading module needs to be patched before use. ' \ 'Do "import databench" before any "import threading".' import gevent.monkey gevent.monkey.patch_all() from .analysis import LIST_ALL_META, Meta, Analysis, MetaZMQ, AnalysisZMQ from .app import run
"""Databench module.""" __version__ = "0.3.0" # Need to make sure monkey.patch_all() is applied before any # 'import threading', but cannot raise error because building the Sphinx # documentation also suffers from this problem, but there it can be ignored. import sys if 'threading' in sys.modules: print 'WARNING: The threading module needs to be patched before use. ' \ 'Do "import databench" before any "import threading".' import gevent.monkey gevent.monkey.patch_all() from .analysis import Meta, Analysis, MetaZMQ, AnalysisZMQ from .app import run
Change all instances list of Meta from a global to a class variable.
Change all instances list of Meta from a global to a class variable.
Python
mit
svenkreiss/databench,svenkreiss/databench,svenkreiss/databench,svenkreiss/databench
--- +++ @@ -12,5 +12,5 @@ import gevent.monkey gevent.monkey.patch_all() -from .analysis import LIST_ALL_META, Meta, Analysis, MetaZMQ, AnalysisZMQ +from .analysis import Meta, Analysis, MetaZMQ, AnalysisZMQ from .app import run
e16960eaaf38513e80fb18580c3e4320978407e4
chainer/training/triggers/__init__.py
chainer/training/triggers/__init__.py
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
Fix the order of importing
Fix the order of importing
Python
mit
wkentaro/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,jnishi/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,niboshi/chainer,jnishi/chainer,hvy/chainer,chainer/chainer,pfnet/chainer,niboshi/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,keisuke-umezawa/chainer,hvy/chainer,rezoo/chainer,okuta/chainer,okuta/chainer,okuta/chainer,aonotas/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,ronekko/chainer,jnishi/chainer,okuta/chainer,anaruse/chainer,tkerola/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer
--- +++ @@ -9,4 +9,3 @@ from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA -from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
b60fbc21271a7efa09d256debb17f583ec83fdf2
MMCorePy_wrap/setup.py
MMCorePy_wrap/setup.py
#!/usr/bin/env python """ setup.py file for SWIG example """ from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', '../MMDevice/Property.cpp', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp'], language = "c++", extra_objects = [], include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup (name = 'MMCorePy', version = '0.1', author = "Micro-Manager", description = "Micro-Manager Core Python wrapper", ext_modules = [mmcorepy_module], py_modules = ["MMCorePy"], )
#!/usr/bin/env python """ This setup.py is intended for use from the Autoconf/Automake build system. It makes a number of assumtions, including that the SWIG sources have already been generated. """ from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/Host.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', ], language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup(name='MMCorePy', version='0.1', author="Micro-Manager", description="Micro-Manager Core Python wrapper", ext_modules=[mmcorepy_module], py_modules=["MMCorePy"], )
Add MMCore/Host.cpp to Unix build.
MMCorePy: Add MMCore/Host.cpp to Unix build. Was missing. Note that build is still broken (even though it does not explicitly fail), at least on Mac OS X, because of missing libraries (IOKit, CoreFoundation, and boost.system, I think). Also removed MMDevice/Property.cpp, which is not needed here. git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@11992 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
Python
mit
kmdouglass/Micro-Manager,kmdouglass/Micro-Manager
--- +++ @@ -1,7 +1,9 @@ #!/usr/bin/env python """ -setup.py file for SWIG example +This setup.py is intended for use from the Autoconf/Automake build system. +It makes a number of assumtions, including that the SWIG sources have already +been generated. """ from distutils.core import setup, Extension @@ -15,27 +17,26 @@ mmcorepy_module = Extension('_MMCorePy', - sources=['MMCorePy_wrap.cxx', - '../MMDevice/DeviceUtils.cpp', - '../MMDevice/ImgBuffer.cpp', - '../MMDevice/Property.cpp', - '../MMCore/CircularBuffer.cpp', - '../MMCore/Configuration.cpp', - '../MMCore/CoreCallback.cpp', - '../MMCore/CoreProperty.cpp', - '../MMCore/FastLogger.cpp', - '../MMCore/MMCore.cpp', - '../MMCore/PluginManager.cpp'], - language = "c++", - extra_objects = [], + sources=['MMCorePy_wrap.cxx', + '../MMCore/CircularBuffer.cpp', + '../MMCore/Configuration.cpp', + '../MMCore/CoreCallback.cpp', + '../MMCore/CoreProperty.cpp', + '../MMCore/FastLogger.cpp', + '../MMCore/Host.cpp', + '../MMCore/MMCore.cpp', + '../MMCore/PluginManager.cpp', + '../MMDevice/DeviceUtils.cpp', + '../MMDevice/ImgBuffer.cpp', + ], + language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) -setup (name = 'MMCorePy', - version = '0.1', - author = "Micro-Manager", - description = "Micro-Manager Core Python wrapper", - ext_modules = [mmcorepy_module], - py_modules = ["MMCorePy"], - ) - +setup(name='MMCorePy', + version='0.1', + author="Micro-Manager", + description="Micro-Manager Core Python wrapper", + ext_modules=[mmcorepy_module], + py_modules=["MMCorePy"], + )
8ae6cfd75d5dc6b775aa80dcc91874c3a9ee7758
WebSphere/changeUID.py
WebSphere/changeUID.py
# Script to change the UID of Users # # Author: Christoph Stoettner # E-Mail: christoph.stoettner@stoeps.de # # example: wsadmin.sh -lang jython -f changeUID.py file.csv # # Format of CSV-File: # uid;mailaddress # import sys import os # Check OS on windows .strip('\n') is not required # Import Connections Admin Commands for Profiles execfile( "profilesAdmin.py" ) print "\nReading from file: " + sys.argv[0] myfile = open( sys.argv[1], 'r' ) for line in myfile.readlines(): if( ";" in line ) : data = line.split( ";" ) print "Working on user " + data[1] email = data[1].strip() uid = data[0].strip() ProfilesService.updateUser( str( email ), uid = str( uid ) ) ProfilesService.publishUserData( email ) print '\nDONE \n'
# Script to change the UID of Users # # Author: Christoph Stoettner # E-Mail: christoph.stoettner@stoeps.de # # example: wsadmin.sh -lang jython -f changeUID.py file.csv # # Format of CSV-File: # uid;mailaddress # don't mask strings with " # import sys import os # Check OS on windows .strip('\n') is not required # Import Connections Admin Commands for Profiles execfile( "profilesAdmin.py" ) print "\nReading from file: " + sys.argv[0] myfile = open( sys.argv[1], 'r' ) for line in myfile.readlines(): if( ";" in line ) : data = line.split( ";" ) print "Working on user " + data[1] email = data[1].strip() uid = data[0].strip() ProfilesService.updateUser( str( email ), uid = str( uid ) ) ProfilesService.publishUserData( email ) print '\nDONE \n'
Change some comments as documentation
Change some comments as documentation
Python
apache-2.0
stoeps13/ibmcnxscripting,stoeps13/ibmcnxscripting,stoeps13/ibmcnxscripting
--- +++ @@ -6,7 +6,8 @@ # example: wsadmin.sh -lang jython -f changeUID.py file.csv # # Format of CSV-File: -# uid;mailaddress +# uid;mailaddress +# don't mask strings with " # import sys import os
010444e37787582788268ccea860ae6bb97bd4b1
enthought/traits/ui/editors/date_editor.py
enthought/traits/ui/editors/date_editor.py
#------------------------------------------------------------------------------ # # Copyright (c) 2008, Enthought, Inc. # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # # Thanks for using Enthought open source! # # Author: Judah De Paula # Date: 10/7/2008 # #------------------------------------------------------------------------------ """ A Traits UI editor that wraps a WX calendar panel. """ from enthought.traits.trait_types import Bool, Int from enthought.traits.ui.editor_factory import EditorFactory #-- DateEditor definition ----------------------------------------------------- class DateEditor ( EditorFactory ): """ Editor factory for date/time editors. """ #--------------------------------------------------------------------------- # Trait definitions: #--------------------------------------------------------------------------- #-- CustomEditor traits ---------------------------------------------------- # True: Must be a List of Dates. False: A Date instance. multi_select = Bool(False) # Should users be able to pick future dates when using the CustomEditor? allow_future = Bool(True) # How many months to show at a time. months = Int(3) # How much space to put between the individual months. padding = Int(5) #-- end DateEditor definition ------------------------------------------------- #-- eof -----------------------------------------------------------------------
#------------------------------------------------------------------------------ # # Copyright (c) 2008, Enthought, Inc. # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # # Thanks for using Enthought open source! # # Author: Judah De Paula # Date: 10/7/2008 # #------------------------------------------------------------------------------ """ A Traits UI editor that wraps a WX calendar panel. """ from enthought.traits.trait_types import Bool, Int from enthought.traits.ui.editor_factory import EditorFactory #-- DateEditor definition ----------------------------------------------------- class DateEditor ( EditorFactory ): """ Editor factory for date/time editors. """ #--------------------------------------------------------------------------- # Trait definitions: #--------------------------------------------------------------------------- #-- CustomEditor traits ---------------------------------------------------- # True: Must be a List of Dates. False: Must be a Date instance. multi_select = Bool(False) # Should users be able to pick future dates when using the CustomEditor? allow_future = Bool(True) # How many months to show at a time. months = Int(3) # How much space to put between the individual months. padding = Int(5) # Does the user have to hold down Shift for the left-click multiselect? shift_to_select = Bool(False) #-- end DateEditor definition ------------------------------------------------- #-- eof -----------------------------------------------------------------------
Allow shift to be required using a factory trait.
Allow shift to be required using a factory trait.
Python
bsd-3-clause
burnpanck/traits,burnpanck/traits
--- +++ @@ -33,7 +33,7 @@ #-- CustomEditor traits ---------------------------------------------------- - # True: Must be a List of Dates. False: A Date instance. + # True: Must be a List of Dates. False: Must be a Date instance. multi_select = Bool(False) # Should users be able to pick future dates when using the CustomEditor? @@ -44,6 +44,9 @@ # How much space to put between the individual months. padding = Int(5) + + # Does the user have to hold down Shift for the left-click multiselect? + shift_to_select = Bool(False) #-- end DateEditor definition -------------------------------------------------
d3cb08d45af60aaf06757ad230a2a33bc3615543
apps/organizations/middleware.py
apps/organizations/middleware.py
from django.http import Http404 from .models import Organization class OrganizationMiddleware(object): def process_request(self, request): try: request.organization = Organization.objects.get( slug__iexact=request.subdomain ) except Organization.DoesNotExist: raise Http404
from django.http import Http404 from .models import Organization class OrganizationMiddleware(object): def process_request(self, request): if request.subdomain is None: return try: request.organization = Organization.objects.get( slug__iexact=request.subdomain ) except Organization.DoesNotExist: raise Http404
Remove subdomain check on pages where subdomain is none
Remove subdomain check on pages where subdomain is none
Python
mit
xobb1t/ddash2013,xobb1t/ddash2013
--- +++ @@ -6,6 +6,8 @@ class OrganizationMiddleware(object): def process_request(self, request): + if request.subdomain is None: + return try: request.organization = Organization.objects.get( slug__iexact=request.subdomain
cbf4d85092232051cd7643d74e003b86f24ba571
feincms/templatetags/feincms_admin_tags.py
feincms/templatetags/feincms_admin_tags.py
from django import template register = template.Library() @register.filter def post_process_fieldsets(fieldset): """ Removes a few fields from FeinCMS admin inlines, those being ``id``, ``DELETE`` and ``ORDER`` currently. """ process = fieldset.model_admin.verbose_name_plural.startswith('Feincms_Inline:') if process: # Exclude special fields and the primary key excluded_fields = ('id', 'DELETE', 'ORDER') fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields] for line in fieldset: yield line
from django import template register = template.Library() @register.filter def post_process_fieldsets(fieldset): """ Removes a few fields from FeinCMS admin inlines, those being ``id``, ``DELETE`` and ``ORDER`` currently. """ excluded_fields = ('id', 'DELETE', 'ORDER') fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields] for line in fieldset: yield line
Fix post_process_fieldsets: This filter is only called for FeinCMS inlines anyway
Fix post_process_fieldsets: This filter is only called for FeinCMS inlines anyway Thanks to mjl for the report and help in fixing the issue.
Python
bsd-3-clause
matthiask/django-content-editor,matthiask/feincms2-content,joshuajonah/feincms,matthiask/feincms2-content,matthiask/django-content-editor,mjl/feincms,feincms/feincms,nickburlett/feincms,mjl/feincms,nickburlett/feincms,feincms/feincms,pjdelport/feincms,pjdelport/feincms,mjl/feincms,nickburlett/feincms,michaelkuty/feincms,feincms/feincms,pjdelport/feincms,michaelkuty/feincms,michaelkuty/feincms,michaelkuty/feincms,joshuajonah/feincms,matthiask/django-content-editor,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,nickburlett/feincms,joshuajonah/feincms
--- +++ @@ -11,11 +11,8 @@ ``id``, ``DELETE`` and ``ORDER`` currently. """ - process = fieldset.model_admin.verbose_name_plural.startswith('Feincms_Inline:') - if process: - # Exclude special fields and the primary key - excluded_fields = ('id', 'DELETE', 'ORDER') - fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields] + excluded_fields = ('id', 'DELETE', 'ORDER') + fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields] for line in fieldset: yield line
42592f3f990ccd111244a8be90513aa4cf35f678
fireplace/cards/classic/neutral_epic.py
fireplace/cards/classic/neutral_epic.py
from ..utils import * # Big Game Hunter class EX1_005: action = [Destroy(TARGET)] # Mountain Giant class EX1_105: def cost(self, value): return value - (len(self.controller.hand) - 1) # Sea Giant class EX1_586: def cost(self, value): return value - len(self.game.board) # Blood Knight class EX1_590: def action(self): count = len(self.game.board.filter(divine_shield=True)) return [ SetTag(ALL_MINIONS, {GameTag.DIVINE_SHIELD: False}), Buff(self, "EX1_590e") * count, ] # Molten Giant class EX1_620: def cost(self, value): return value - self.controller.hero.damage # Captain's Parrot class NEW1_016: action = [ForceDraw(CONTROLLER, CONTROLLER_DECK + PIRATE)] # Hungry Crab class NEW1_017: action = [Destroy(TARGET), Buff(SELF, "NEW1_017e")] # Doomsayer class NEW1_021: events = [ OWN_TURN_BEGIN.on(Destroy(ALL_MINIONS)) ]
from ..utils import * # Big Game Hunter class EX1_005: action = [Destroy(TARGET)] # Mountain Giant class EX1_105: def cost(self, value): return value - (len(self.controller.hand) - 1) # Sea Giant class EX1_586: def cost(self, value): return value - len(self.game.board) # Blood Knight class EX1_590: action = [ Buff(SELF, "EX1_590e") * Count(ALL_MINIONS + DIVINE_SHIELD), SetTag(ALL_MINIONS, {GameTag.DIVINE_SHIELD: False}) ] # Molten Giant class EX1_620: def cost(self, value): return value - self.controller.hero.damage # Captain's Parrot class NEW1_016: action = [ForceDraw(CONTROLLER, CONTROLLER_DECK + PIRATE)] # Hungry Crab class NEW1_017: action = [Destroy(TARGET), Buff(SELF, "NEW1_017e")] # Doomsayer class NEW1_021: events = [ OWN_TURN_BEGIN.on(Destroy(ALL_MINIONS)) ]
Use Count() in Blood Knight
Use Count() in Blood Knight
Python
agpl-3.0
liujimj/fireplace,Ragowit/fireplace,jleclanche/fireplace,butozerca/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,liujimj/fireplace,Meerkov/fireplace,smallnamespace/fireplace,butozerca/fireplace,NightKev/fireplace,smallnamespace/fireplace,amw2104/fireplace,oftc-ftw/fireplace,amw2104/fireplace,beheh/fireplace,Ragowit/fireplace
--- +++ @@ -20,12 +20,10 @@ # Blood Knight class EX1_590: - def action(self): - count = len(self.game.board.filter(divine_shield=True)) - return [ - SetTag(ALL_MINIONS, {GameTag.DIVINE_SHIELD: False}), - Buff(self, "EX1_590e") * count, - ] + action = [ + Buff(SELF, "EX1_590e") * Count(ALL_MINIONS + DIVINE_SHIELD), + SetTag(ALL_MINIONS, {GameTag.DIVINE_SHIELD: False}) + ] # Molten Giant
64d6a44ecbbaa7d8ac2c79bd95827ced66254bcf
fireplace/carddata/minions/warlock.py
fireplace/carddata/minions/warlock.py
import random from ..card import * # Blood Imp class CS2_059(Card): def endTurn(self): if self.game.currentPlayer is self.owner: if self.owner.field: random.choice(self.owner.field).buff("CS2_059o") class CS2_059o(Card): health = 1 # Felguard class EX1_301(Card): def activate(self): self.owner.loseMana(1) # Succubus class EX1_306(Card): activate = discard(1) # Doomguard class EX1_310(Card): activate = discard(2) # Pit Lord class EX1_313(Card): def activate(self): self.owner.hero.damage(5) # Flame Imp class EX1_319(Card): def activate(self): self.owner.hero.damage(3)
import random from ..card import * # Blood Imp class CS2_059(Card): def endTurn(self): if self.game.currentPlayer is self.owner: if self.owner.field: random.choice(self.owner.field).buff("CS2_059o") class CS2_059o(Card): health = 1 # Felguard class EX1_301(Card): def activate(self): self.owner.loseMana(1) # Succubus class EX1_306(Card): activate = discard(1) # Doomguard class EX1_310(Card): activate = discard(2) # Pit Lord class EX1_313(Card): def activate(self): self.owner.hero.damage(5) # Flame Imp class EX1_319(Card): def activate(self): self.owner.hero.damage(3) # Lord Jaraxxus class EX1_323(Card): def activate(self): self.removeFromField() self.owner.setHero("EX1_323h")
IMPLEMENT JARAXXUS, EREDAR LORD OF THE BURNING LEGION
IMPLEMENT JARAXXUS, EREDAR LORD OF THE BURNING LEGION
Python
agpl-3.0
butozerca/fireplace,jleclanche/fireplace,amw2104/fireplace,NightKev/fireplace,liujimj/fireplace,amw2104/fireplace,smallnamespace/fireplace,butozerca/fireplace,oftc-ftw/fireplace,beheh/fireplace,Ragowit/fireplace,Meerkov/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,liujimj/fireplace,smallnamespace/fireplace,Ragowit/fireplace
--- +++ @@ -39,3 +39,10 @@ class EX1_319(Card): def activate(self): self.owner.hero.damage(3) + + +# Lord Jaraxxus +class EX1_323(Card): + def activate(self): + self.removeFromField() + self.owner.setHero("EX1_323h")
49dc93d0fd2ab58815d91aba8afc6796cf45ce98
migrations/versions/0093_data_gov_uk.py
migrations/versions/0093_data_gov_uk.py
"""empty message Revision ID: 0093_data_gov_uk Revises: 0092_add_inbound_provider Create Date: 2017-06-05 16:15:17.744908 """ # revision identifiers, used by Alembic. revision = '0093_data_gov_uk' down_revision = '0092_add_inbound_provider' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql DATA_GOV_UK_ID = '123496d4-44cb-4324-8e0a-4187101f4bdc' def upgrade(): op.execute("""INSERT INTO organisation VALUES ( '{}', '', '', '' )""".format(DATA_GOV_UK_ID)) def downgrade(): op.execute(""" DELETE FROM organisation WHERE "id" = '{}' """.format(DATA_GOV_UK_ID))
"""empty message Revision ID: 0093_data_gov_uk Revises: 0092_add_inbound_provider Create Date: 2017-06-05 16:15:17.744908 """ # revision identifiers, used by Alembic. revision = '0093_data_gov_uk' down_revision = '0092_add_inbound_provider' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql DATA_GOV_UK_ID = '123496d4-44cb-4324-8e0a-4187101f4bdc' def upgrade(): op.execute("""INSERT INTO organisation VALUES ( '{}', '', 'data_gov_uk_x2.png', '' )""".format(DATA_GOV_UK_ID)) def downgrade(): op.execute(""" DELETE FROM organisation WHERE "id" = '{}' """.format(DATA_GOV_UK_ID))
Revert "Remove name from organisation"
Revert "Remove name from organisation"
Python
mit
alphagov/notifications-api,alphagov/notifications-api
--- +++ @@ -21,7 +21,7 @@ op.execute("""INSERT INTO organisation VALUES ( '{}', '', - '', + 'data_gov_uk_x2.png', '' )""".format(DATA_GOV_UK_ID))
4e135d1e40499f06b81d4ec3b427462c9b4ba2ee
test/cli/test_cmd_piper.py
test/cli/test_cmd_piper.py
from piper import build from piper.db import core as db from piper.cli import cmd_piper from piper.cli.cli import CLIBase import mock class TestEntry(object): @mock.patch('piper.cli.cmd_piper.CLIBase') def test_calls(self, clibase): self.mock = mock.Mock() cmd_piper.entry(self.mock) clibase.assert_called_once_with( 'piper', (build.ExecCLI, db.DbCLI), args=self.mock ) clibase.return_value.entry.assert_called_once_with() @mock.patch('piper.cli.cmd_piper.CLIBase') def test_return_value(self, clibase): ret = cmd_piper.entry() assert ret is clibase.return_value.entry.return_value class TestEntryIntegration(object): def test_db_init(self): args = ['db', 'init'] cli = CLIBase('piper', (db.DbCLI,), args=args) db.DbCLI.db = mock.Mock() cli.entry() db.DbCLI.db.init.assert_called_once_with(cli.config)
from piper import build from piper.db import core as db from piper.cli import cmd_piper from piper.cli.cli import CLIBase import mock class TestEntry(object): @mock.patch('piper.cli.cmd_piper.CLIBase') def test_calls(self, clibase): self.mock = mock.Mock() cmd_piper.entry(self.mock) clibase.assert_called_once_with( 'piper', (build.ExecCLI, db.DbCLI), args=self.mock ) clibase.return_value.entry.assert_called_once_with() @mock.patch('piper.cli.cmd_piper.CLIBase') def test_return_value(self, clibase): ret = cmd_piper.entry() assert ret is clibase.return_value.entry.return_value class TestEntryIntegration(object): def test_db_init(self): args = ['db', 'init'] cli = CLIBase('piper', (db.DbCLI,), args=args) db.DbCLI.db = mock.Mock() cli.entry() db.DbCLI.db.init.assert_called_once_with(cli.config) @mock.patch('piper.build.Build.run') def test_exec(self, run): args = ['exec'] cli = CLIBase('piper', (build.ExecCLI,), args=args) cli.entry() run.assert_called_once_with()
Add short integration test for ExecCLI.build
Add short integration test for ExecCLI.build
Python
mit
thiderman/piper
--- +++ @@ -32,3 +32,11 @@ db.DbCLI.db = mock.Mock() cli.entry() db.DbCLI.db.init.assert_called_once_with(cli.config) + + @mock.patch('piper.build.Build.run') + def test_exec(self, run): + args = ['exec'] + cli = CLIBase('piper', (build.ExecCLI,), args=args) + + cli.entry() + run.assert_called_once_with()
cfd2312ae81dd79832d4b03717278a79bc8705d1
brte/converters/btf.py
brte/converters/btf.py
if 'imported' in locals(): import imp import bpy imp.reload(blendergltf) else: imported = True from . import blendergltf import json import math import bpy class BTFConverter: def convert(self, add_delta, update_delta, remove_delta, view_delta): for key, value in update_delta.items(): if value: add_delta[key] = value data = blendergltf.export_gltf(add_delta) self.export_view(view_delta, data) return data def export_view(self, view_delta, gltf): if 'extras' not in gltf: gltf['extras'] = {} if 'viewport' in view_delta: gltf['extras']['view'] = { 'width' : view_delta['viewport'].width, 'height' : view_delta['viewport'].width, 'projection_matrix': view_delta['projection_matrix'], 'view_matrix': view_delta['view_matrix'], }
if 'imported' in locals(): import imp import bpy imp.reload(blendergltf) else: imported = True from . import blendergltf import json import math import bpy def togl(matrix): return [i for col in matrix.col for i in col] class BTFConverter: def convert(self, add_delta, update_delta, remove_delta, view_delta): for key, value in update_delta.items(): if value: add_delta[key] = value data = blendergltf.export_gltf(add_delta) self.export_view(view_delta, data) return data def export_view(self, view_delta, gltf): if 'extras' not in gltf: gltf['extras'] = {} if 'viewport' in view_delta: gltf['extras']['view'] = { 'width' : view_delta['viewport'].width, 'height' : view_delta['viewport'].width, 'projection_matrix': togl(view_delta['projection_matrix']), 'view_matrix': togl(view_delta['view_matrix']), }
Fix JSON serialization issue with view and projection matrices
Fix JSON serialization issue with view and projection matrices
Python
mit
Kupoman/BlenderRealtimeEngineAddon
--- +++ @@ -11,6 +11,10 @@ import math import bpy + + +def togl(matrix): + return [i for col in matrix.col for i in col] class BTFConverter: @@ -33,6 +37,6 @@ gltf['extras']['view'] = { 'width' : view_delta['viewport'].width, 'height' : view_delta['viewport'].width, - 'projection_matrix': view_delta['projection_matrix'], - 'view_matrix': view_delta['view_matrix'], + 'projection_matrix': togl(view_delta['projection_matrix']), + 'view_matrix': togl(view_delta['view_matrix']), }
d7a434de8926162adeff6e6934dde7eb771baf8d
custom/enikshay/reports/views.py
custom/enikshay/reports/views.py
from django.http.response import JsonResponse from django.utils.decorators import method_decorator from django.views.generic.base import View from corehq.apps.domain.decorators import login_and_domain_required from corehq.apps.locations.permissions import location_safe from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider from custom.enikshay.reports.utils import StubReport @location_safe class LocationsView(View): @method_decorator(login_and_domain_required) def dispatch(self, *args, **kwargs): return super(LocationsView, self).dispatch(*args, **kwargs) def get(self, request, domain, *args, **kwargs): user = self.request.couch_user query_context = ChoiceQueryContext( query=request.GET.get('q', None), limit=int(request.GET.get('limit', 20)), page=int(request.GET.get('page', 1)) - 1, user=user ) location_choice_provider = LocationChoiceProvider(StubReport(domain=domain), None) location_choice_provider.configure({'include_descendants': True}) return JsonResponse( { 'results': [ {'id': location.value, 'text': location.display} for location in location_choice_provider.query(query_context) ], 'total': location_choice_provider.query_count(query_context, user) } )
from django.http.response import JsonResponse from django.utils.decorators import method_decorator from django.views.generic.base import View from corehq.apps.domain.decorators import login_and_domain_required from corehq.apps.locations.permissions import location_safe from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider from custom.enikshay.reports.utils import StubReport @location_safe class LocationsView(View): @method_decorator(login_and_domain_required) def dispatch(self, *args, **kwargs): return super(LocationsView, self).dispatch(*args, **kwargs) def get(self, request, domain, *args, **kwargs): user = self.request.couch_user query_context = ChoiceQueryContext( query=request.GET.get('q', None), limit=int(request.GET.get('limit', 20)), page=int(request.GET.get('page', 1)) - 1, user=user ) location_choice_provider = LocationChoiceProvider(StubReport(domain=domain), None) location_choice_provider.configure({'include_descendants': True}) return JsonResponse( { 'results': [ {'id': location.value, 'text': location.display} for location in location_choice_provider.query(query_context) ], 'total': location_choice_provider.query_count(query_context.query, user) } )
Fix pagination of locations filter
Fix pagination of locations filter
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
--- +++ @@ -32,6 +32,6 @@ {'id': location.value, 'text': location.display} for location in location_choice_provider.query(query_context) ], - 'total': location_choice_provider.query_count(query_context, user) + 'total': location_choice_provider.query_count(query_context.query, user) } )
efca073ec2c15bb2d60c5c8b1e675dcdcbc51ed1
fade/fade/__init__.py
fade/fade/__init__.py
#!/usr/bin/env python """ See LICENSE.txt file for copyright and license details. """ from flask import Flask from app import views import sys app = Flask(__name__) app.config.from_object('config') def adjust_system_path(): """ Adjust the system path, so we can search in custom dirs for modules. """ sys.path.append('fade/') sys.path.append('fade/static/') sys.path.append('fade/static/img/') sys.path.append('fade/static/js/') sys.path.append('fade/static/css/') sys.path.append('fade/templates/') sys.path.append('instance/') if __name__ == '__main__': adjust_system_path() app.run(debug=True)
Move to run.py at higher level.
Move to run.py at higher level.
Python
bsd-3-clause
rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python
--- +++ @@ -1,29 +1 @@ -#!/usr/bin/env python -""" - See LICENSE.txt file for copyright and license details. -""" -from flask import Flask -from app import views -import sys - -app = Flask(__name__) -app.config.from_object('config') - - -def adjust_system_path(): - """ - Adjust the system path, so we can search in custom dirs for modules. - """ - sys.path.append('fade/') - sys.path.append('fade/static/') - sys.path.append('fade/static/img/') - sys.path.append('fade/static/js/') - sys.path.append('fade/static/css/') - sys.path.append('fade/templates/') - sys.path.append('instance/') - - -if __name__ == '__main__': - adjust_system_path() - app.run(debug=True)
097d2b7b3de6593a0aac8e82418c0dcadc299542
tests/unit/test_context.py
tests/unit/test_context.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.common import context from tests import utils class ContextTest(utils.BaseTestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx) def test_admin_context_show_deleted_flag_default(self): ctx = context.get_admin_context() self.assertFalse(ctx.show_deleted)
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.common import context from openstack.common import test class ContextTest(test.BaseTestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx) def test_admin_context_show_deleted_flag_default(self): ctx = context.get_admin_context() self.assertFalse(ctx.show_deleted)
Replace using tests.utils with openstack.common.test
Replace using tests.utils with openstack.common.test It is the first step to replace using tests.utils with openstack.common.test. All these tests don't use mock objects, stubs, config files and use only BaseTestCase class. Change-Id: I511816b5c9e6c5c34ebff199296ee4fc8b84c672 bp: common-unit-tests
Python
apache-2.0
openstack/oslo.context,dims/oslo.context,varunarya10/oslo.context,yanheven/oslo.middleware,JioCloud/oslo.context,citrix-openstack-build/oslo.context
--- +++ @@ -16,10 +16,10 @@ # under the License. from openstack.common import context -from tests import utils +from openstack.common import test -class ContextTest(utils.BaseTestCase): +class ContextTest(test.BaseTestCase): def test_context(self): ctx = context.RequestContext()
2057b76908c8875fd58d3027a89e43584ee57025
service/opencv.py
service/opencv.py
__author__ = 'paulo' import cv2 class OpenCVIntegration(object): @staticmethod def adaptive_threshold(filename_in, filename_out): img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) th = cv2.adaptiveThreshold(img,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY,11,2) params = [cv2.cv.CV_IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, th, params) @staticmethod def grayscale(filename_in, filename_out): img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) params = [cv2.cv.CV_IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, img, params)
__author__ = 'paulo' import cv2 class OpenCVIntegration(object): @staticmethod def adaptive_threshold(filename_in, filename_out): img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) th = cv2.adaptiveThreshold(img,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY,11,2) params = [cv2.IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, th, params) @staticmethod def grayscale(filename_in, filename_out): img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) params = [cv2.IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, img, params)
Fix IMWRITE_JPEG_QUALITY ref for the newest OpenCV
Fix IMWRITE_JPEG_QUALITY ref for the newest OpenCV
Python
mit
nfscan/ocr-process-service,nfscan/ocr-process-service,PauloMigAlmeida/ocr-process-service,PauloMigAlmeida/ocr-process-service
--- +++ @@ -11,12 +11,12 @@ img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) th = cv2.adaptiveThreshold(img,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY,11,2) - params = [cv2.cv.CV_IMWRITE_JPEG_QUALITY, 100] + params = [cv2.IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, th, params) @staticmethod def grayscale(filename_in, filename_out): img = cv2.imread(filename_in) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) - params = [cv2.cv.CV_IMWRITE_JPEG_QUALITY, 100] + params = [cv2.IMWRITE_JPEG_QUALITY, 100] cv2.imwrite(filename_out, img, params)
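In OpenCV 3 the cv2.cv compatibility module was dropped, so the JPEG-quality flag now lives directly on cv2, which is all this fix changes. A self-contained sketch of the corrected call (the synthetic in-memory image and the out.jpg filename are illustrative, not part of the service above):

# Sketch: writing a JPEG with an explicit quality setting under OpenCV 3+,
# where cv2.cv.CV_IMWRITE_JPEG_QUALITY became cv2.IMWRITE_JPEG_QUALITY.
import cv2
import numpy as np

img = np.zeros((64, 64, 3), dtype=np.uint8)   # placeholder image instead of cv2.imread
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # same grayscale step as the service
params = [cv2.IMWRITE_JPEG_QUALITY, 100]      # maximum JPEG quality
cv2.imwrite('out.jpg', gray, params)          # writes out.jpg to the working directory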
0562f34398041e6696f6e9bad56ea2a135968b77
paratemp/sim_setup/pt_simulation.py
paratemp/sim_setup/pt_simulation.py
"""This contains code for setting up parallel tempering calcs""" ######################################################################## # # # This script was written by Thomas Heavey in 2019. # # theavey@bu.edu thomasjheavey@gmail.com # # # # Copyright 2019 Thomas J. Heavey IV # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # # implied. # # See the License for the specific language governing permissions and # # limitations under the License. # # # ######################################################################## from .simulation import Simulation class PTSimulation(Simulation): def __init__(self, *args, **kwargs): super(Simulation, self).__init__(*args, **kwargs)
"""This contains code for setting up parallel tempering calcs""" ######################################################################## # # # This script was written by Thomas Heavey in 2019. # # theavey@bu.edu thomasjheavey@gmail.com # # # # Copyright 2019 Thomas J. Heavey IV # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # # implied. # # See the License for the specific language governing permissions and # # limitations under the License. # # # ######################################################################## from .simulation import Simulation class PTSimulation(Simulation): def __init__(self, *args, **kwargs): super(PTSimulation, self).__init__(*args, **kwargs)
Fix typo in inheritance/super call
Fix typo in inheritance/super call
Python
apache-2.0
theavey/ParaTemp,theavey/ParaTemp
--- +++ @@ -28,4 +28,4 @@ class PTSimulation(Simulation): def __init__(self, *args, **kwargs): - super(Simulation, self).__init__(*args, **kwargs) + super(PTSimulation, self).__init__(*args, **kwargs)
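The first argument to the two-argument super() must be the class being defined, not its base; passing the base class makes the MRO walk start one step too far along and silently skips the parent's __init__. A standalone sketch with hypothetical classes shows the same pitfall this commit fixes:

# Sketch of the Python 2-style super() pitfall (hypothetical Base/Child classes).
class Base(object):
    def __init__(self):
        self.tag = 'base'

class Child(Base):
    def __init__(self):
        # Correct: name the class being defined, so the lookup starts *after* Child
        # and Base.__init__ runs. Writing super(Base, self) here would skip Base and
        # dispatch to object.__init__ instead, leaving self.tag unset.
        super(Child, self).__init__()

c = Child()
print(c.tag)  # 'base'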
1c6b190a91823a6731083b3e8a415277e0352e19
python_apps/airtime_analyzer/setup.py
python_apps/airtime_analyzer/setup.py
import os from setuptools import setup # Change directory since setuptools uses relative paths os.chdir(os.path.dirname(os.path.realpath(__file__))) setup( name="libretime-analyzer", version="0.1", description="Libretime Analyzer Worker and File Importer", url="https://libretime.org", author="LibreTime Contributors", license="AGPLv3", packages=["airtime_analyzer"], entry_points={ "console_scripts": [ "libretime-analyzer=airtime_analyzer.cli:main", ] }, install_requires=[ "mutagen==1.42.0", "pika~=1.1.0", "file-magic", "requests>=2.7.0", "rgain3==1.0.0", "pycairo==1.19.1", ], zip_safe=False, )
import os from setuptools import setup # Change directory since setuptools uses relative paths os.chdir(os.path.dirname(os.path.realpath(__file__))) setup( name="libretime-analyzer", version="0.1", description="Libretime Analyzer Worker and File Importer", url="https://libretime.org", author="LibreTime Contributors", license="AGPLv3", packages=["airtime_analyzer"], entry_points={ "console_scripts": [ "libretime-analyzer=airtime_analyzer.cli:main", ] }, install_requires=[ "mutagen==1.42.0", "pika>=1.0.0", "file-magic", "requests>=2.7.0", "rgain3==1.0.0", "pycairo==1.19.1", ], zip_safe=False, )
Update pika requirement in /python_apps/airtime_analyzer
Update pika requirement in /python_apps/airtime_analyzer Updates the requirements on [pika](https://github.com/pika/pika) to permit the latest version. - [Release notes](https://github.com/pika/pika/releases) - [Commits](https://github.com/pika/pika/compare/1.1.0...1.2.0) --- updated-dependencies: - dependency-name: pika dependency-type: direct:production ... Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
Python
agpl-3.0
LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime
--- +++ @@ -20,7 +20,7 @@ }, install_requires=[ "mutagen==1.42.0", - "pika~=1.1.0", + "pika>=1.0.0", "file-magic", "requests>=2.7.0", "rgain3==1.0.0",
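The switch from "~=1.1.0" to ">=1.0.0" is what lets the dependency move to pika 1.2.0: the compatible-release operator pins the minor version, while the relaxed lower bound does not. A small sketch of the difference using the third-party packaging library (an assumption for illustration only, not a dependency of this setup.py):

# Sketch: how the two requirement specifiers treat pika 1.2.0.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

compatible = SpecifierSet("~=1.1.0")   # equivalent to >=1.1.0, ==1.1.*
minimum = SpecifierSet(">=1.0.0")      # any release at or above 1.0.0

print(Version("1.2.0") in compatible)  # False: ~=1.1.0 keeps the 1.1.x series
print(Version("1.2.0") in minimum)     # True: the relaxed bound admits 1.2.0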
e230837f473808b3c136862feefdd6660ebb77e8
scripts/examples/14-WiFi-Shield/mqtt.py
scripts/examples/14-WiFi-Shield/mqtt.py
# MQTT Example. # This example shows how to use the MQTT library. # # 1) Copy the mqtt.py library to OpenMV storage. # 2) Install the mosquitto client on PC and run the following command: # mosquitto_sub -h test.mosquitto.org -t "openmv/test" -v # import time, network from mqtt import MQTTClient SSID='mux' # Network SSID KEY='j806fVnT7tObdCYE' # Network key # Init wlan module and connect to network print("Trying to connect... (may take a while)...") wlan = network.WINC() wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK) # We should have a valid IP now via DHCP print(wlan.ifconfig()) client = MQTTClient("openmv", "test.mosquitto.org", port=1883) client.connect() while (True): client.publish("openmv/test", "Hello World!") time.sleep(1000)
# MQTT Example. # This example shows how to use the MQTT library. # # 1) Copy the mqtt.py library to OpenMV storage. # 2) Install the mosquitto client on PC and run the following command: # mosquitto_sub -h test.mosquitto.org -t "openmv/test" -v # import time, network from mqtt import MQTTClient SSID='' # Network SSID KEY='' # Network key # Init wlan module and connect to network print("Trying to connect... (may take a while)...") wlan = network.WINC() wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK) # We should have a valid IP now via DHCP print(wlan.ifconfig()) client = MQTTClient("openmv", "test.mosquitto.org", port=1883) client.connect() while (True): client.publish("openmv/test", "Hello World!") time.sleep(1000)
Remove ssid/key from example script.
Remove ssid/key from example script.
Python
mit
kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,openmv/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,kwagyeman/openmv,openmv/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv
--- +++ @@ -8,8 +8,8 @@ import time, network from mqtt import MQTTClient -SSID='mux' # Network SSID -KEY='j806fVnT7tObdCYE' # Network key +SSID='' # Network SSID +KEY='' # Network key # Init wlan module and connect to network print("Trying to connect... (may take a while)...")
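Blanking the values keeps real credentials out of the repository. One common alternative, shown only as a sketch and not something this OpenMV example does, is to pull them from a small untracked module so the script still runs unmodified on the board (wifi_config.py is a hypothetical file name):

# Sketch: keep Wi-Fi credentials in an untracked local module.
try:
    from wifi_config import SSID, KEY   # e.g. SSID = 'mynet'; KEY = 'secret'
except ImportError:
    SSID, KEY = '', ''                  # fall back to the blank placeholders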
e0c3fe2b1ecb4caf33b9ba3dafabe4eedae97c5e
spiralgalaxygame/tests/test_sentinel.py
spiralgalaxygame/tests/test_sentinel.py
import unittest from spiralgalaxygame.sentinel import Sentinel, Enum class SentinelTests (unittest.TestCase): def setUp(self): self.s = Sentinel('thingy') def test_name(self): self.assertIs(self.s.name, 'thingy') def test_repr(self): self.assertEqual(repr(self.s), '<Sentinel thingy>') def test_identity_by_name(self): s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning. self.assertIs(s2, self.s) class EnumTests (unittest.TestCase): def setUp(self): self.e = Enum('red', 'green', 'blue') def test_iter_and_members_are_sentinels(self): for member in self.e: self.assertIsInstance(member, Sentinel) def test_member_as_attr_and_in_operator(self): self.assertIn(self.e.green, self.e)
import unittest from spiralgalaxygame.sentinel import Sentinel, Enum class SentinelTests (unittest.TestCase): def setUp(self): self.s = Sentinel('thingy') def test_name(self): self.assertIs(self.s.name, 'thingy') def test_repr(self): self.assertEqual(repr(self.s), '<Sentinel thingy>') def test_identity_by_name(self): s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning. self.assertIs(s2, self.s) class EnumTests (unittest.TestCase): def setUp(self): self.e = Enum('red', 'green', 'blue') def test_repr(self): self.assertEqual(repr(self.e), '<Enum blue, green, red>') def test_iter_and_members_are_sentinels(self): for member in self.e: self.assertIsInstance(member, Sentinel) def test_member_as_attr_and_in_operator(self): self.assertIn(self.e.green, self.e)
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
Python
agpl-3.0
nejucomo/sgg,nejucomo/sgg,nejucomo/sgg
--- +++ @@ -22,6 +22,9 @@ def setUp(self): self.e = Enum('red', 'green', 'blue') + def test_repr(self): + self.assertEqual(repr(self.e), '<Enum blue, green, red>') + def test_iter_and_members_are_sentinels(self): for member in self.e: self.assertIsInstance(member, Sentinel)
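The expected string '<Enum blue, green, red>' implies the members are rendered in sorted name order. Since spiralgalaxygame/sentinel.py is not part of this record, the following is only a plausible reconstruction of an __repr__ that would make the new test pass:

# Sketch: one Enum.__repr__ consistent with the new test (the real implementation
# stores Sentinel instances; this stand-in keeps only the names).
class Enum(object):
    def __init__(self, *names):
        self._names = names
        for name in names:
            setattr(self, name, name)

    def __repr__(self):
        return '<Enum {}>'.format(', '.join(sorted(self._names)))

print(repr(Enum('red', 'green', 'blue')))  # <Enum blue, green, red>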
56c3a5e4b29fb5f198f133e06e92ab5af72d62dd
conda_tools/package.py
conda_tools/package.py
import tarfile import os import json class Package(object): def __init__(self, path, mode='r'): self.path = path self.mode = mode self._tarfile = tarfile.open(path, mode=mode)
import tarfile import json from pathlib import PurePath, PureWindowsPat from os.path import realpath, abspath, join from hashlib import md5 try: from collections.abc import Iterable except ImportError: from collections import Iterable from .common import lazyproperty class BadLinkError(Exception): pass class Package(object): """ A very thin wrapper around tarfile objects. A convenience class specifically tailored to conda archives. This class is intended for read-only access. """ def __init__(self, path): self.path = path self.mode = mode self._tarfile = tarfile.open(path, mode='r') @lazyproperty def hash(self): h = md5() blocksize = h.block_size with open(self.path, 'rb') as hin: h.update(hin.read(blocksize)) return h.hexdigest() def files(self): return self._tarfile.getmembers() def extract(self, members, destination='.'): """ Extract tarfile member to destination. This method does some basic sanitation of the member. """ self._tarfile.extractall(path=destination, members=sane_members(members)) def sane_members(members): resolve = lambda path: realpath(normpath(join(destination, path))) destination = PurePath(destination) for member in members: mpath = PurePath(resolve(member.path)) # Check if mpath is under destination if destination not in mpath.parents: raise BadPathError("Bad path to outside destination directory: {}".format(mpath)) elif m.issym() or m.islnk(): # Check link to make sure it resolves under destination lnkpath = PurePath(m.linkpath) if lnkpath.is_absolute() or lnkpath.is_reserved(): raise BadLinkError("Bad link: {}".format(lnkpath)) # resolve the link to an absolute path lnkpath = PurePath(resolve(lnkpath)) if destination not in lnkpath.parents: raise BadLinkError("Bad link to outside destination directory: {}".format(cpath)) yield member
Add package object for working with conda compressed archives.
Add package object for working with conda compressed archives.
Python
bsd-3-clause
groutr/conda-tools,groutr/conda-tools
--- +++ @@ -1,12 +1,73 @@ import tarfile -import os import json +from pathlib import PurePath, PureWindowsPat +from os.path import realpath, abspath, join +from hashlib import md5 + +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable + +from .common import lazyproperty + +class BadLinkError(Exception): + pass class Package(object): + """ + A very thin wrapper around tarfile objects. - def __init__(self, path, mode='r'): + A convenience class specifically tailored to conda archives. + This class is intended for read-only access. + """ + def __init__(self, path): self.path = path self.mode = mode - self._tarfile = tarfile.open(path, mode=mode) + self._tarfile = tarfile.open(path, mode='r') + @lazyproperty + def hash(self): + h = md5() + blocksize = h.block_size + + with open(self.path, 'rb') as hin: + h.update(hin.read(blocksize)) + return h.hexdigest() + + def files(self): + return self._tarfile.getmembers() + + def extract(self, members, destination='.'): + """ + Extract tarfile member to destination. + + This method does some basic sanitation of the member. + """ + self._tarfile.extractall(path=destination, members=sane_members(members)) + + +def sane_members(members): + resolve = lambda path: realpath(normpath(join(destination, path))) + + destination = PurePath(destination) + + for member in members: + mpath = PurePath(resolve(member.path)) + + # Check if mpath is under destination + if destination not in mpath.parents: + raise BadPathError("Bad path to outside destination directory: {}".format(mpath)) + elif m.issym() or m.islnk(): + # Check link to make sure it resolves under destination + lnkpath = PurePath(m.linkpath) + if lnkpath.is_absolute() or lnkpath.is_reserved(): + raise BadLinkError("Bad link: {}".format(lnkpath)) + + # resolve the link to an absolute path + lnkpath = PurePath(resolve(lnkpath)) + if destination not in lnkpath.parents: + raise BadLinkError("Bad link to outside destination directory: {}".format(cpath)) + + yield member
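The committed helper still references names that do not resolve as written (PureWindowsPat, normpath, the m/member mix-up, an undefined BadPathError, and a destination that is never passed in). A cleaned-up sketch of the same path-traversal guard, assuming Python 3.4+ for os.path.commonpath and defining both exception types locally:

# Sketch: a runnable version of the intent behind sane_members().
import os
from os.path import realpath, normpath, join, dirname

class BadPathError(Exception):
    pass

class BadLinkError(Exception):
    pass

def sane_members(members, destination):
    dest = realpath(destination)

    def resolve(path):
        # Anchor the (possibly relative) archive path under the destination directory.
        return realpath(normpath(join(dest, path)))

    for member in members:
        target = resolve(member.path)
        if os.path.commonpath([dest, target]) != dest:
            raise BadPathError("Bad path outside destination: {}".format(member.path))
        if member.issym() or member.islnk():
            if os.path.isabs(member.linkpath):
                raise BadLinkError("Bad absolute link: {}".format(member.linkpath))
            # Link targets are relative to the directory of the member declaring them.
            link_target = resolve(join(dirname(member.path), member.linkpath))
            if os.path.commonpath([dest, link_target]) != dest:
                raise BadLinkError("Bad link outside destination: {}".format(member.linkpath))
        yield member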
70be059480af2eaac8a0b7afdd91f7ca86b94c05
src/azure/cli/commands/resourcegroup.py
src/azure/cli/commands/resourcegroup.py
from .._util import TableOutput from ..commands import command, description, option from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes #@option('--tag-name -g <tagName>', _("the resource group's tag name")) #@option('--tag-value -g <tagValue>', _("the resource group's tag value")) #@option('--top -g <number>', _("Top N resource groups to retrieve")) def list_groups(args, unexpected): from azure.mgmt.resource.resources import ResourceManagementClient, ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient(ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() with TableOutput() as to: for grp in groups: assert isinstance(grp, ResourceGroup) to.cell('Name', grp.name) to.cell('Type', grp.properties) to.cell('Location', grp.location) to.cell('Tags', grp.tags) to.end_row() if not to.any_rows: print('No resource groups defined')
from msrest import Serializer from ..commands import command, description, option from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes #@option('--tag-name -g <tagName>', _("the resource group's tag name")) #@option('--tag-value -g <tagValue>', _("the resource group's tag value")) #@option('--top -g <number>', _("Top N resource groups to retrieve")) def list_groups(args, unexpected): from azure.mgmt.resource.resources import ResourceManagementClient, ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient(ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
Return a serialized object instead of creating the table output in the command
Return a serialized object instead of creating the table output in the command
Python
mit
yugangw-msft/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli
--- +++ @@ -1,4 +1,5 @@ -from .._util import TableOutput +from msrest import Serializer + from ..commands import command, description, option from .._profile import Profile @@ -19,14 +20,5 @@ #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() - with TableOutput() as to: - for grp in groups: - assert isinstance(grp, ResourceGroup) - to.cell('Name', grp.name) - to.cell('Type', grp.properties) - to.cell('Location', grp.location) - to.cell('Tags', grp.tags) - to.end_row() - if not to.any_rows: - print('No resource groups defined') - + serializable = Serializer().serialize_data(groups, "[ResourceGroup]") + return serializable
afdfcef3cf7f390dd0fc7eac0806272742ffa479
core/models/payment.py
core/models/payment.py
from django.db import models from django.db.models import signals from django.utils.translation import ugettext_lazy as _ from core.mixins import AuditableMixin from .invoice import Invoice class Payment(AuditableMixin, models.Model): invoice = models.ForeignKey( Invoice, editable=False, on_delete=models.CASCADE, verbose_name=_("Invoice") ) description = models.TextField( blank=True, null=True, verbose_name=_("Description") ) total = models.DecimalField( max_digits=12, decimal_places=2, verbose_name=_("Total") ) class Meta: ordering = ['-date_creation', ] verbose_name = _("Payment") verbose_name_plural = _("Payments") def __str__(self): return "%.2f" % self.total def get_absolute_url(self): return self.parent.get_absolute_url() @property def company(self): return self.parent.company @property def parent(self): return self.invoice def post_save_payment(instance, sender, created, **kwargs): if instance.invoice.is_payed and not instance.company.is_active: instance.company.activate() signals.post_save.connect(post_save_payment, sender=Payment)
from django.db import models from django.db.models import signals from django.utils.translation import ugettext_lazy as _ from core.mixins import AuditableMixin class Payment(AuditableMixin, models.Model): invoice = models.ForeignKey( 'core.Invoice', editable=False, on_delete=models.CASCADE, verbose_name=_("Invoice") ) description = models.TextField( blank=True, null=True, verbose_name=_("Description") ) total = models.DecimalField( max_digits=12, decimal_places=2, verbose_name=_("Total") ) class Meta: ordering = ['-date_creation', ] verbose_name = _("Payment") verbose_name_plural = _("Payments") def __str__(self): return "%.2f" % self.total def get_absolute_url(self): return self.parent.get_absolute_url() @property def company(self): return self.parent.company @property def parent(self): return self.invoice def post_save_payment(instance, sender, created, **kwargs): if instance.invoice.is_paid and not instance.company.is_active: instance.company.activate() signals.post_save.connect(post_save_payment, sender=Payment)
Use string instead of class
Use string instead of class
Python
bsd-3-clause
ikcam/django-skeleton,ikcam/django-skeleton,ikcam/django-skeleton,ikcam/django-skeleton
--- +++ @@ -3,12 +3,11 @@ from django.utils.translation import ugettext_lazy as _ from core.mixins import AuditableMixin -from .invoice import Invoice class Payment(AuditableMixin, models.Model): invoice = models.ForeignKey( - Invoice, editable=False, on_delete=models.CASCADE, + 'core.Invoice', editable=False, on_delete=models.CASCADE, verbose_name=_("Invoice") ) description = models.TextField( @@ -39,7 +38,7 @@ def post_save_payment(instance, sender, created, **kwargs): - if instance.invoice.is_payed and not instance.company.is_active: + if instance.invoice.is_paid and not instance.company.is_active: instance.company.activate()
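Passing the related model as an 'app_label.ModelName' string makes Django resolve the relation lazily when the app registry loads, which is what removes the need for payment.py to import invoice.py directly and avoids circular imports. A minimal sketch with hypothetical app and model names (it needs to live inside a configured Django project to run):

# Sketch: lazy string reference to a model in another module of the same project.
from django.db import models

class Payment(models.Model):
    invoice = models.ForeignKey(
        'billing.Invoice',          # app_label.ModelName, resolved at app-loading time
        on_delete=models.CASCADE,
        related_name='payments',
    )
    total = models.DecimalField(max_digits=12, decimal_places=2)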
55a330149a05c456012f2570c2151a82ac8435b2
images/singleuser/user-config.py
images/singleuser/user-config.py
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] del fam # If OAuth integration is available, take it if 'CLIENT_ID' in os.environ: authenticate['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] )
Revert to previous way of setting 'authenticate'
Revert to previous way of setting 'authenticate' This reverts commit a055f97a342f670171f30095cabfd4ba1bfdad17. This reverts commit 4cec5250a3f9058fea5af5ef432a5b230ca94963.
Python
mit
yuvipanda/paws,yuvipanda/paws
--- +++ @@ -20,16 +20,15 @@ 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): - usernames[fam]['*'] = os.environ['USER'] - if 'ACCESS_KEY' in os.environ: - # If OAuth integration is available, take it - authenticate.setdefault(fam, {})['*'] = ( - os.environ['CLIENT_ID'], - os.environ['CLIENT_SECRET'], - os.environ['ACCESS_KEY'], - os.environ['ACCESS_SECRET'] - ) - del fam + +# If OAuth integration is available, take it +if 'CLIENT_ID' in os.environ: + authenticate['*'] = ( + os.environ['CLIENT_ID'], + os.environ['CLIENT_SECRET'], + os.environ['ACCESS_KEY'], + os.environ['ACCESS_SECRET'] + )
208c4d9e18301a85bca5d64a1e2abb95a6865fe9
students/psbriant/session08/circle.py
students/psbriant/session08/circle.py
""" Name: Paul Briant Date: 11/29/16 Class: Introduction to Python Session: 08 Assignment: Circle Lab Description: Classes for Circle Lab """ import math class Circle: def __init__(self, radius): """ """ self.radius = radius self.diameter = radius * 2 @classmethod def from_diameter(cls, diameter): self = cls(diameter / 2) return self def __str__(self): return "A circle object with radius: {}".format(self.radius) def __repr__(self): return "Circle({})".format(self.radius) @property def diameter(self): return self.radius * 2 @diameter.setter def diameter(self, value): self.radius = value / 2 @property def area(self): return (self.radius ** 2) * math.pi
""" Name: Paul Briant Date: 11/29/16 Class: Introduction to Python Session: 08 Assignment: Circle Lab Description: Classes for Circle Lab """ import math class Circle: def __init__(self, radius): """ Initialize circle attributes radius and diameter""" self.radius = radius self.diameter = radius * 2 @classmethod def from_diameter(cls, diameter): self = cls(diameter / 2) return self def __str__(self): return "A circle object with radius: {}".format(self.radius) def __repr__(self): return "Circle({})".format(self.radius) @property def diameter(self): """ Calculate diameter from radius""" return self.radius * 2 @diameter.setter def diameter(self, value): """ Calculate radius from diameter""" self.radius = value / 2 @property def area(self): """ Calculate area using radius and pi""" return (self.radius ** 2) * math.pi
Add Docstrings for class methods.
Add Docstrings for class methods.
Python
unlicense
weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016
--- +++ @@ -11,17 +11,17 @@ import math + class Circle: def __init__(self, radius): - """ - - """ + """ Initialize circle attributes radius and diameter""" self.radius = radius self.diameter = radius * 2 @classmethod def from_diameter(cls, diameter): + self = cls(diameter / 2) return self @@ -33,12 +33,15 @@ @property def diameter(self): + """ Calculate diameter from radius""" return self.radius * 2 @diameter.setter def diameter(self, value): + """ Calculate radius from diameter""" self.radius = value / 2 @property def area(self): + """ Calculate area using radius and pi""" return (self.radius ** 2) * math.pi
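A short usage sketch for the Circle class above, assuming it is importable as written; it shows how the diameter property and its setter keep radius and diameter derived from one another:

# Usage sketch for the Circle class in this record.
c = Circle(4)
print(c.diameter)        # 8
c.diameter = 10          # the setter stores radius = 10 / 2
print(c.radius)          # 5.0
print(round(c.area, 2))  # 78.54, i.e. pi * 5**2

d = Circle.from_diameter(12)
print(d.radius)          # 6.0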
4dd488634aa030a8e9a1404e5fe026265dd07a75
tailor/tests/utils/charformat_test.py
tailor/tests/utils/charformat_test.py
import unittest from tailor.utils import charformat class MyTestCase(unittest.TestCase): def is_upper_camel_case_test_upper_camel_case_name(self): self.assertTrue(charformat.is_upper_camel_case('HelloWorld')) def is_upper_camel_case_test_lower_camel_case_name(self): self.assertFalse(charformat.is_upper_camel_case('helloWorld')) def is_upper_camel_case_test_blank_name(self): self.assertFalse(charformat.is_upper_camel_case('')) def is_upper_camel_case_test_snake_case_name(self): self.assertFalse(charformat.is_upper_camel_case('Hello_World')) if __name__ == '__main__': unittest.main()
import unittest from tailor.utils import charformat class MyTestCase(unittest.TestCase): def is_upper_camel_case_test_upper_camel_case_name(self): self.assertTrue(charformat.is_upper_camel_case('HelloWorld')) def is_upper_camel_case_test_lower_camel_case_name(self): self.assertFalse(charformat.is_upper_camel_case('helloWorld')) def is_upper_camel_case_test_blank_name(self): self.assertFalse(charformat.is_upper_camel_case('')) def is_upper_camel_case_test_snake_case_name(self): self.assertFalse(charformat.is_upper_camel_case('Hello_World')) def is_upper_camel_case_test_numeric_name(self): self.assertFalse(charformat.is_upper_camel_case('1ello_world')) if __name__ == '__main__': unittest.main()
Add numeric name test case
Add numeric name test case
Python
mit
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
--- +++ @@ -16,6 +16,8 @@ def is_upper_camel_case_test_snake_case_name(self): self.assertFalse(charformat.is_upper_camel_case('Hello_World')) + def is_upper_camel_case_test_numeric_name(self): + self.assertFalse(charformat.is_upper_camel_case('1ello_world')) if __name__ == '__main__': unittest.main()
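tailor/utils/charformat.py itself is not included in this record, so the following is only a plausible reconstruction of is_upper_camel_case() that satisfies all four test cases, including the new numeric one:

# Sketch: one implementation consistent with the tests above (not the real tailor code).
def is_upper_camel_case(name):
    # Non-empty, starts with an uppercase letter, and contains only letters/digits.
    return bool(name) and name[0].isupper() and name.isalnum()

assert is_upper_camel_case('HelloWorld')
assert not is_upper_camel_case('helloWorld')
assert not is_upper_camel_case('')
assert not is_upper_camel_case('Hello_World')
assert not is_upper_camel_case('1ello_world')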