commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
94124a65b5aa540f9f997dfcdbd856207d011555
|
wafer/conf_registration/models.py
|
wafer/conf_registration/models.py
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
Make requirements optional. Add help text and fix display in admin form
|
Make requirements optional. Add help text and fix display in admin form
|
Python
|
isc
|
CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
Make requirements optional. Add help text and fix display in admin form
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
<commit_msg>Make requirements optional. Add help text and fix display in admin form<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
Make requirements optional. Add help text and fix display in admin formfrom django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
<commit_msg>Make requirements optional. Add help text and fix display in admin form<commit_after>from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
4e67491bda3204d449c540fa80cbbb8ab73921dd
|
wagtail/wagtailadmin/edit_bird.py
|
wagtail/wagtailadmin/edit_bird.py
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return
# Don't render if the user doesn't have permission to edit this page
permission_checker = self.page.permissions_for_user(request.user)
if not permission_checker.can_edit():
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page option
|
Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page option
|
Python
|
bsd-3-clause
|
rjsproxy/wagtail,jnns/wagtail,taedori81/wagtail,thenewguy/wagtail,willcodefortea/wagtail,serzans/wagtail,inonit/wagtail,tangentlabs/wagtail,rsalmaso/wagtail,chimeno/wagtail,Toshakins/wagtail,jorge-marques/wagtail,wagtail/wagtail,quru/wagtail,kurtw/wagtail,rv816/wagtail,mayapurmedia/wagtail,darith27/wagtail,marctc/wagtail,torchbox/wagtail,mayapurmedia/wagtail,iansprice/wagtail,rv816/wagtail,zerolab/wagtail,Pennebaker/wagtail,kaedroho/wagtail,tangentlabs/wagtail,davecranwell/wagtail,benemery/wagtail,lojack/wagtail,benemery/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,mjec/wagtail,quru/wagtail,FlipperPA/wagtail,serzans/wagtail,kurtrwall/wagtail,iansprice/wagtail,wagtail/wagtail,taedori81/wagtail,rv816/wagtail,Tivix/wagtail,thenewguy/wagtail,janusnic/wagtail,zerolab/wagtail,gogobook/wagtail,nimasmi/wagtail,takeshineshiro/wagtail,chimeno/wagtail,jnns/wagtail,janusnic/wagtail,nrsimha/wagtail,nealtodd/wagtail,gogobook/wagtail,iho/wagtail,wagtail/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,nutztherookie/wagtail,mjec/wagtail,inonit/wagtail,FlipperPA/wagtail,dresiu/wagtail,hamsterbacke23/wagtail,mayapurmedia/wagtail,Klaudit/wagtail,iho/wagtail,m-sanders/wagtail,takeflight/wagtail,bjesus/wagtail,jordij/wagtail,gasman/wagtail,mephizzle/wagtail,KimGlazebrook/wagtail-experiment,nutztherookie/wagtail,benjaoming/wagtail,willcodefortea/wagtail,benjaoming/wagtail,rv816/wagtail,mephizzle/wagtail,timorieber/wagtail,jordij/wagtail,kaedroho/wagtail,willcodefortea/wagtail,jorge-marques/wagtail,mixxorz/wagtail,WQuanfeng/wagtail,takeshineshiro/wagtail,jnns/wagtail,gasman/wagtail,helenwarren/pied-wagtail,serzans/wagtail,hanpama/wagtail,JoshBarr/wagtail,m-sanders/wagtail,100Shapes/wagtail,nealtodd/wagtail,darith27/wagtail,nutztherookie/wagtail,gasman/wagtail,willcodefortea/wagtail,Klaudit/wagtail,bjesus/wagtail,KimGlazebrook/wagtail-experiment,kaedroho/wagtail,takeshineshiro/wagtail,marctc/wagtail,stevenewey/wagtail,dresiu/wagtail,mikedingjan/wagtail,helenwarren/pied-wagtail,m-sanders/
wagtail,chimeno/wagtail,rsalmaso/wagtail,jorge-marques/wagtail,mayapurmedia/wagtail,inonit/wagtail,kurtw/wagtail,WQuanfeng/wagtail,davecranwell/wagtail,nealtodd/wagtail,mixxorz/wagtail,nilnvoid/wagtail,KimGlazebrook/wagtail-experiment,chrxr/wagtail,chrxr/wagtail,bjesus/wagtail,KimGlazebrook/wagtail-experiment,benemery/wagtail,tangentlabs/wagtail,dresiu/wagtail,Pennebaker/wagtail,m-sanders/wagtail,thenewguy/wagtail,Pennebaker/wagtail,iho/wagtail,Tivix/wagtail,timorieber/wagtail,rsalmaso/wagtail,thenewguy/wagtail,iho/wagtail,takeflight/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,kaedroho/wagtail,Klaudit/wagtail,jorge-marques/wagtail,taedori81/wagtail,tangentlabs/wagtail,takeflight/wagtail,bjesus/wagtail,nrsimha/wagtail,gogobook/wagtail,takeflight/wagtail,Pennebaker/wagtail,quru/wagtail,stevenewey/wagtail,marctc/wagtail,torchbox/wagtail,stevenewey/wagtail,mephizzle/wagtail,jordij/wagtail,janusnic/wagtail,torchbox/wagtail,timorieber/wagtail,benjaoming/wagtail,nrsimha/wagtail,torchbox/wagtail,wagtail/wagtail,mikedingjan/wagtail,darith27/wagtail,helenwarren/pied-wagtail,inonit/wagtail,iansprice/wagtail,wagtail/wagtail,stevenewey/wagtail,Klaudit/wagtail,nrsimha/wagtail,kurtw/wagtail,mixxorz/wagtail,WQuanfeng/wagtail,100Shapes/wagtail,taedori81/wagtail,nimasmi/wagtail,chrxr/wagtail,kurtrwall/wagtail,WQuanfeng/wagtail,janusnic/wagtail,davecranwell/wagtail,FlipperPA/wagtail,benemery/wagtail,JoshBarr/wagtail,chrxr/wagtail,mikedingjan/wagtail,Tivix/wagtail,rsalmaso/wagtail,gasman/wagtail,chimeno/wagtail,mephizzle/wagtail,nutztherookie/wagtail,kaedroho/wagtail,JoshBarr/wagtail,kurtrwall/wagtail,nilnvoid/wagtail,mjec/wagtail,Toshakins/wagtail,kurtw/wagtail,marctc/wagtail,lojack/wagtail,nilnvoid/wagtail,Tivix/wagtail,darith27/wagtail,JoshBarr/wagtail,hanpama/wagtail,dresiu/wagtail,takeshineshiro/wagtail,100Shapes/wagtail,davecranwell/wagtail,rsalmaso/wagtail,benjaoming/wagtail,jnns/wagtail,jordij/wagtail,jorge-marques/wagtail,gogobook/wagtail,timorieber/wagtail,zerolab/wagt
ail,mixxorz/wagtail,mixxorz/wagtail,nealtodd/wagtail,chimeno/wagtail,Toshakins/wagtail,dresiu/wagtail,rjsproxy/wagtail,zerolab/wagtail,quru/wagtail,serzans/wagtail,Toshakins/wagtail,hanpama/wagtail,zerolab/wagtail,gasman/wagtail,rjsproxy/wagtail,nilnvoid/wagtail,mjec/wagtail,rjsproxy/wagtail,nimasmi/wagtail,taedori81/wagtail,kurtrwall/wagtail,iansprice/wagtail,thenewguy/wagtail,hanpama/wagtail,lojack/wagtail
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page option
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return
# Don't render if the user doesn't have permission to edit this page
permission_checker = self.page.permissions_for_user(request.user)
if not permission_checker.can_edit():
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
<commit_before>from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
<commit_msg>Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page option<commit_after>
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return
# Don't render if the user doesn't have permission to edit this page
permission_checker = self.page.permissions_for_user(request.user)
if not permission_checker.can_edit():
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page optionfrom django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return
# Don't render if the user doesn't have permission to edit this page
permission_checker = self.page.permissions_for_user(request.user)
if not permission_checker.can_edit():
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
<commit_before>from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
<commit_msg>Edit bird now checks if the user has permission to access admin and edit the page before displaying edit page option<commit_after>from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
class BaseItem(object):
template = 'wagtailadmin/edit_bird/base_item.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), context_instance=RequestContext(request))
class EditPageItem(BaseItem):
template = 'wagtailadmin/edit_bird/edit_page_item.html'
def __init__(self, page):
self.page = page
def render(self, request):
# Don't render if the page doesn't have an id
if not self.page.id:
return
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return
# Don't render if the user doesn't have permission to edit this page
permission_checker = self.page.permissions_for_user(request.user)
if not permission_checker.can_edit():
return
return super(EditPageItem, self).render(request)
def render_edit_bird(request, items):
# Don't render if the user is not logged in
if not request.user.is_authenticated():
return
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Quit if no items rendered
if not rendered_items:
return
# Render the edit bird
return render_to_string('wagtailadmin/edit_bird/edit_bird.html', {
'items': [item.render(request) for item in items],
})
|
e58416edc8ef86abbdebc4711f3afa2b4e90cc1f
|
tests/test_virtualbox.py
|
tests/test_virtualbox.py
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
import time
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CloneVirtualboxTests(unittest.TestCase):
def setUp(self):
self.name = "SaltCloudVirtualboxTestVM"
def tearDown(self):
pass
def test_create_machine(self):
return
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
if __name__ == '__main__':
unittest.main()
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
from tests.helpers import VirtualboxTestCase
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CreationDestructionVirtualboxTests(VirtualboxTestCase):
def setUp(self):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
vm_name = "__temp_test_vm__"
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
virtualbox.vb_destroy_machine(vm_name)
self.assertMachineDoesNotExist(vm_name)
class CloneVirtualboxTests(VirtualboxTestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
self.name = "SaltCloudVirtualboxTestVM"
virtualbox.vb_create_machine(self.name)
self.assertMachineExists(self.name)
def tearDown(self):
virtualbox.vb_destroy_machine(self.name)
self.assertMachineDoesNotExist(self.name)
def test_create_machine(self):
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
self.assertMachineExists(vb_name)
virtualbox.vb_destroy_machine(vb_name)
self.assertMachineDoesNotExist(vb_name)
if __name__ == '__main__':
unittest.main()
|
Improve tests Test VM creation, destruction and cloning
|
Improve tests
Test VM creation, destruction and cloning
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,LoveIsGrief/saltcloud-virtualbox-provider
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
import time
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CloneVirtualboxTests(unittest.TestCase):
def setUp(self):
self.name = "SaltCloudVirtualboxTestVM"
def tearDown(self):
pass
def test_create_machine(self):
return
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
if __name__ == '__main__':
unittest.main()
Improve tests
Test VM creation, destruction and cloning
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
from tests.helpers import VirtualboxTestCase
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CreationDestructionVirtualboxTests(VirtualboxTestCase):
def setUp(self):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
vm_name = "__temp_test_vm__"
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
virtualbox.vb_destroy_machine(vm_name)
self.assertMachineDoesNotExist(vm_name)
class CloneVirtualboxTests(VirtualboxTestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
self.name = "SaltCloudVirtualboxTestVM"
virtualbox.vb_create_machine(self.name)
self.assertMachineExists(self.name)
def tearDown(self):
virtualbox.vb_destroy_machine(self.name)
self.assertMachineDoesNotExist(self.name)
def test_create_machine(self):
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
self.assertMachineExists(vb_name)
virtualbox.vb_destroy_machine(vb_name)
self.assertMachineDoesNotExist(vb_name)
if __name__ == '__main__':
unittest.main()
|
<commit_before># This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
import time
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CloneVirtualboxTests(unittest.TestCase):
def setUp(self):
self.name = "SaltCloudVirtualboxTestVM"
def tearDown(self):
pass
def test_create_machine(self):
return
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
if __name__ == '__main__':
unittest.main()
<commit_msg>Improve tests
Test VM creation, destruction and cloning<commit_after>
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
from tests.helpers import VirtualboxTestCase
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CreationDestructionVirtualboxTests(VirtualboxTestCase):
def setUp(self):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
vm_name = "__temp_test_vm__"
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
virtualbox.vb_destroy_machine(vm_name)
self.assertMachineDoesNotExist(vm_name)
class CloneVirtualboxTests(VirtualboxTestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
self.name = "SaltCloudVirtualboxTestVM"
virtualbox.vb_create_machine(self.name)
self.assertMachineExists(self.name)
def tearDown(self):
virtualbox.vb_destroy_machine(self.name)
self.assertMachineDoesNotExist(self.name)
def test_create_machine(self):
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
self.assertMachineExists(vb_name)
virtualbox.vb_destroy_machine(vb_name)
self.assertMachineDoesNotExist(vb_name)
if __name__ == '__main__':
unittest.main()
|
# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
import time
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CloneVirtualboxTests(unittest.TestCase):
def setUp(self):
self.name = "SaltCloudVirtualboxTestVM"
def tearDown(self):
pass
def test_create_machine(self):
return
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
if __name__ == '__main__':
unittest.main()
Improve tests
Test VM creation, destruction and cloning# This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
from tests.helpers import VirtualboxTestCase
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CreationDestructionVirtualboxTests(VirtualboxTestCase):
def setUp(self):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
vm_name = "__temp_test_vm__"
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
virtualbox.vb_destroy_machine(vm_name)
self.assertMachineDoesNotExist(vm_name)
class CloneVirtualboxTests(VirtualboxTestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
self.name = "SaltCloudVirtualboxTestVM"
virtualbox.vb_create_machine(self.name)
self.assertMachineExists(self.name)
def tearDown(self):
virtualbox.vb_destroy_machine(self.name)
self.assertMachineDoesNotExist(self.name)
def test_create_machine(self):
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
self.assertMachineExists(vb_name)
virtualbox.vb_destroy_machine(vb_name)
self.assertMachineDoesNotExist(vb_name)
if __name__ == '__main__':
unittest.main()
|
<commit_before># This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
import time
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CloneVirtualboxTests(unittest.TestCase):
def setUp(self):
self.name = "SaltCloudVirtualboxTestVM"
def tearDown(self):
pass
def test_create_machine(self):
return
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
if __name__ == '__main__':
unittest.main()
<commit_msg>Improve tests
Test VM creation, destruction and cloning<commit_after># This code assumes vboxapi.py from VirtualBox distribution
# being in PYTHONPATH, or installed system-wide
import unittest
import logging
from tests.helpers import VirtualboxTestCase
import virtualbox
log = logging.getLogger()
log_handler = logging.StreamHandler()
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
log.setLevel(logging.DEBUG)
info = log.info
class BaseVirtualboxTests(unittest.TestCase):
def test_get_manager(self):
self.assertIsNotNone(virtualbox.vb_get_manager())
class CreationDestructionVirtualboxTests(VirtualboxTestCase):
def setUp(self):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
vm_name = "__temp_test_vm__"
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
virtualbox.vb_destroy_machine(vm_name)
self.assertMachineDoesNotExist(vm_name)
class CloneVirtualboxTests(VirtualboxTestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
self.name = "SaltCloudVirtualboxTestVM"
virtualbox.vb_create_machine(self.name)
self.assertMachineExists(self.name)
def tearDown(self):
virtualbox.vb_destroy_machine(self.name)
self.assertMachineDoesNotExist(self.name)
def test_create_machine(self):
vb_name = "NewTestMachine"
virtualbox.vb_clone_vm(
name=vb_name,
clone_from=self.name
)
self.assertMachineExists(vb_name)
virtualbox.vb_destroy_machine(vb_name)
self.assertMachineDoesNotExist(vb_name)
if __name__ == '__main__':
unittest.main()
|
e4dd679f20a066c86a87a42199f66b288a314fcf
|
scons-tools/gmcs.py
|
scons-tools/gmcs.py
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
Use -platform:anycpu while compiling .NET assemblies
|
Use -platform:anycpu while compiling .NET assemblies
|
Python
|
lgpl-2.1
|
eyecreate/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,eyecreate/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,eyecreate/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,eyecreate/tapcfg,eyecreate/tapcfg,juhovh/tapcfg
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
Use -platform:anycpu while compiling .NET assemblies
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
<commit_before>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
<commit_msg>Use -platform:anycpu while compiling .NET assemblies<commit_after>
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
Use -platform:anycpu while compiling .NET assembliesimport os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
<commit_before>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
<commit_msg>Use -platform:anycpu while compiling .NET assemblies<commit_after>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
17ac329783bce0cb88d92659cf58a3ea476c66ef
|
scripts/sound_output_test.py
|
scripts/sound_output_test.py
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_default_output_device_info()))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
n_bytes_to_test = 1024 * 2 * 6
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
# print("len(data): {}, frame_count: {}".format(len(data), frame_count))
if len(data) < n_bytes_to_test:
wf.rewind()
data = wf.readframes(frame_count)
print("Rewinding")
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID)))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=48000,
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
Add support for looping sample
|
Add support for looping sample
|
Python
|
bsd-2-clause
|
mfergie/human-hive
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_default_output_device_info()))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
Add support for looping sample
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
n_bytes_to_test = 1024 * 2 * 6
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
# print("len(data): {}, frame_count: {}".format(len(data), frame_count))
if len(data) < n_bytes_to_test:
wf.rewind()
data = wf.readframes(frame_count)
print("Rewinding")
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID)))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=48000,
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
<commit_before>import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_default_output_device_info()))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
<commit_msg>Add support for looping sample<commit_after>
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
n_bytes_to_test = 1024 * 2 * 6
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
# print("len(data): {}, frame_count: {}".format(len(data), frame_count))
if len(data) < n_bytes_to_test:
wf.rewind()
data = wf.readframes(frame_count)
print("Rewinding")
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID)))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=48000,
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_default_output_device_info()))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
Add support for looping sampleimport pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
n_bytes_to_test = 1024 * 2 * 6
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
# print("len(data): {}, frame_count: {}".format(len(data), frame_count))
if len(data) < n_bytes_to_test:
wf.rewind()
data = wf.readframes(frame_count)
print("Rewinding")
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID)))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=48000,
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
<commit_before>import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_default_output_device_info()))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
<commit_msg>Add support for looping sample<commit_after>import pyaudio
import wave
import time
import sys
import numpy as np
if len(sys.argv) < 2:
print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
n_bytes_to_test = 1024 * 2 * 6
DEVICE_ID=2
def callback(in_data, frame_count, time_info, status):
data = wf.readframes(frame_count)
# npdata = np.frombuffer(data, dtype=np.int16)
# print("len(data): {}, frame_count: {}".format(len(data), frame_count))
if len(data) < n_bytes_to_test:
wf.rewind()
data = wf.readframes(frame_count)
print("Rewinding")
return (data, pyaudio.paContinue)
print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID)))
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=48000,
output_device_index=DEVICE_ID,
output=True,
stream_callback=callback)
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
|
0e9d93b7e0998df6f5299bb7666adbcdedb5de28
|
sfblog_project/settings/prod.py
|
sfblog_project/settings/prod.py
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
ALLOWED_HOSTS = [
"blog.starship-factory.ch",
"blog.starship-factory.com",
"blog.starship-factory.de",
"blog.starship-factory.eu",
"blog.starship-factory.org",
"blog.starshipfactory.ch",
"blog.starshipfactory.de",
"blog.starshipfactory.eu",
"blog.starshipfactory.org",
"starship-factory.ch",
"starship-factory.com",
"starship-factory.de",
"starship-factory.eu",
"starship-factory.org",
"starshipfactory.ch",
"starshipfactory.de",
"starshipfactory.eu",
"starshipfactory.org",
"www.starship-factory.ch",
"www.starship-factory.com",
"www.starship-factory.de",
"www.starship-factory.eu",
"www.starship-factory.org",
"www.starshipfactory.ch",
"www.starshipfactory.de",
"www.starshipfactory.eu",
"www.starshipfactory.org",
]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
Whitelist the official names for the starship factory blog.
|
Whitelist the official names for the starship factory blog.
|
Python
|
bsd-3-clause
|
starshipfactory/sfblog,starshipfactory/sfblog,starshipfactory/sfblog,starshipfactory/sfblog
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
Whitelist the official names for the starship factory blog.
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
ALLOWED_HOSTS = [
"blog.starship-factory.ch",
"blog.starship-factory.com",
"blog.starship-factory.de",
"blog.starship-factory.eu",
"blog.starship-factory.org",
"blog.starshipfactory.ch",
"blog.starshipfactory.de",
"blog.starshipfactory.eu",
"blog.starshipfactory.org",
"starship-factory.ch",
"starship-factory.com",
"starship-factory.de",
"starship-factory.eu",
"starship-factory.org",
"starshipfactory.ch",
"starshipfactory.de",
"starshipfactory.eu",
"starshipfactory.org",
"www.starship-factory.ch",
"www.starship-factory.com",
"www.starship-factory.de",
"www.starship-factory.eu",
"www.starship-factory.org",
"www.starshipfactory.ch",
"www.starshipfactory.de",
"www.starshipfactory.eu",
"www.starshipfactory.org",
]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
<commit_before>from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
<commit_msg>Whitelist the official names for the starship factory blog.<commit_after>
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
ALLOWED_HOSTS = [
"blog.starship-factory.ch",
"blog.starship-factory.com",
"blog.starship-factory.de",
"blog.starship-factory.eu",
"blog.starship-factory.org",
"blog.starshipfactory.ch",
"blog.starshipfactory.de",
"blog.starshipfactory.eu",
"blog.starshipfactory.org",
"starship-factory.ch",
"starship-factory.com",
"starship-factory.de",
"starship-factory.eu",
"starship-factory.org",
"starshipfactory.ch",
"starshipfactory.de",
"starshipfactory.eu",
"starshipfactory.org",
"www.starship-factory.ch",
"www.starship-factory.com",
"www.starship-factory.de",
"www.starship-factory.eu",
"www.starship-factory.org",
"www.starshipfactory.ch",
"www.starshipfactory.de",
"www.starshipfactory.eu",
"www.starshipfactory.org",
]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
Whitelist the official names for the starship factory blog.from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
ALLOWED_HOSTS = [
"blog.starship-factory.ch",
"blog.starship-factory.com",
"blog.starship-factory.de",
"blog.starship-factory.eu",
"blog.starship-factory.org",
"blog.starshipfactory.ch",
"blog.starshipfactory.de",
"blog.starshipfactory.eu",
"blog.starshipfactory.org",
"starship-factory.ch",
"starship-factory.com",
"starship-factory.de",
"starship-factory.eu",
"starship-factory.org",
"starshipfactory.ch",
"starshipfactory.de",
"starshipfactory.eu",
"starshipfactory.org",
"www.starship-factory.ch",
"www.starship-factory.com",
"www.starship-factory.de",
"www.starship-factory.eu",
"www.starship-factory.org",
"www.starshipfactory.ch",
"www.starshipfactory.de",
"www.starshipfactory.eu",
"www.starshipfactory.org",
]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
<commit_before>from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
<commit_msg>Whitelist the official names for the starship factory blog.<commit_after>from .base import * # NOQA
from os import environ
environ.setdefault('SFBLOG_CONFIG_PATH', '/etc/sfblog')
ALLOWED_HOSTS = [
"blog.starship-factory.ch",
"blog.starship-factory.com",
"blog.starship-factory.de",
"blog.starship-factory.eu",
"blog.starship-factory.org",
"blog.starshipfactory.ch",
"blog.starshipfactory.de",
"blog.starshipfactory.eu",
"blog.starshipfactory.org",
"starship-factory.ch",
"starship-factory.com",
"starship-factory.de",
"starship-factory.eu",
"starship-factory.org",
"starshipfactory.ch",
"starshipfactory.de",
"starshipfactory.eu",
"starshipfactory.org",
"www.starship-factory.ch",
"www.starship-factory.com",
"www.starship-factory.de",
"www.starship-factory.eu",
"www.starship-factory.org",
"www.starshipfactory.ch",
"www.starshipfactory.de",
"www.starshipfactory.eu",
"www.starshipfactory.org",
]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sfblog',
'USER': 'sfblog',
'PASSWORD': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgpassword').read().strip(),
'HOST': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgserver').read().strip(),
'PORT': open(environ.get('SFBLOG_CONFIG_PATH') + '/pgport').read().strip(),
}
}
|
ab47a14acff93f52f3f995e1b8a0b9e1e742f3fe
|
overseer/urls.py
|
overseer/urls.py
|
from django.conf.urls.defaults import *
import os.path

# URL routes for the overseer status app.
urlpatterns = patterns('',
    # Serve bundled static media from the package's own media/ directory.
    url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
        'document_root': os.path.join(os.path.dirname(__file__), 'media'),
        'show_indexes': True
    }, name='media'),
    url(r'^$', 'overseer.views.index', name='index'),
    url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
    url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
    url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
    # Short URL redirect. Named regex groups are captured as *strings*, so
    # the interpolation must be %(id)s — %(id)d raises TypeError ("%d format:
    # a number is required") at request time on some servers.
    url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
    url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
    url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
    url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
from django.conf.urls.defaults import *
import os.path

# URL routes for the overseer status app.
urlpatterns = patterns('',
    # Serve bundled static media from the package's own media/ directory.
    url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
        'document_root': os.path.join(os.path.dirname(__file__), 'media'),
        'show_indexes': True
    }, name='media'),
    url(r'^$', 'overseer.views.index', name='index'),
    url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
    url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
    url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
    # Short URL redirect; %(id)s because named regex groups are captured
    # as strings.
    url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
    url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
    url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
    url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
Format number as string or it errors in some webservers
|
Format number as string or it errors in some webservers
|
Python
|
apache-2.0
|
disqus/overseer
|
from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)d/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)Format number as string or it errors in some webservers
|
from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
<commit_before>from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)d/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)<commit_msg>Format number as string or it errors in some webservers<commit_after>
|
from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)d/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)Format number as string or it errors in some webserversfrom django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
<commit_before>from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)d/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)<commit_msg>Format number as string or it errors in some webservers<commit_after>from django.conf.urls.defaults import *
import os.path
urlpatterns = patterns('',
url(r'^media/(?P<path>.+)?$', 'django.views.static.serve', {
'document_root': os.path.join(os.path.dirname(__file__), 'media'),
'show_indexes': True
}, name='media'),
url(r'^$', 'overseer.views.index', name='index'),
url(r'^service/(?P<slug>[^/]+)/$', 'overseer.views.service', name='service'),
url(r'^service/(?P<slug>[^/]+)/last-event/$', 'overseer.views.last_event', name='last_event'),
url(r'^event/(?P<id>[^/]+)/$', 'overseer.views.event', name='event'),
url(r'^(?P<id>\d+)$', 'django.views.generic.simple.redirect_to', {'url': 'event/%(id)s/'}, name='event_short'),
url(r'^subscribe/$', 'overseer.views.create_subscription', name='create_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/$', 'overseer.views.update_subscription', name='update_subscription'),
url(r'^subscription/(?P<ident>[^/]+)/verify/$', 'overseer.views.verify_subscription', name='verify_subscription'),
)
|
32f1ce16ce9df1f4615a0403ed56bf6fd7dbbef4
|
slackbotpry/event.py
|
slackbotpry/event.py
|
class Event:
    """Wraps a Slack event payload and delegates messaging calls to the bot.

    Channel and timestamp arguments default to the values carried by the
    event payload ('channel' and 'ts' keys), so handlers can act on the
    triggering message without restating them.
    """

    def __init__(self, bot, data):
        self.bot = bot    # bot client exposing post_message/add_reaction/remove_reaction
        self.data = data  # raw event payload dict

    def post_message(self, text, channel=None):
        """Post *text*; channel defaults to the one the event arrived on."""
        if channel is None:
            channel = self.data['channel']
        # Propagate the bot's result to the caller; the original dropped it,
        # hiding the API response.
        return self.bot.post_message(text, channel)

    def add_reaction(self, emoji, channel=None, timestamp=None):
        """Add *emoji* to the message this event refers to."""
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        return self.bot.add_reaction(emoji, channel, timestamp)

    def remove_reaction(self, emoji, channel=None, timestamp=None):
        """Remove *emoji* from the message this event refers to."""
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        return self.bot.remove_reaction(emoji, channel, timestamp)
|
class Event:
    """A Slack event whose messaging helpers fall back to the event's own
    channel and timestamp when the caller does not override them."""

    def __init__(self, bot, data):
        self.bot = bot
        self.data = data

    def post_message(self, text, channel=None):
        """Send *text*, defaulting to this event's channel; returns the
        bot's result."""
        target = self.data['channel'] if channel is None else channel
        return self.bot.post_message(text, target)

    def add_reaction(self, emoji, channel=None, timestamp=None):
        """Attach *emoji* to the message this event refers to; returns the
        bot's result."""
        target = self.data['channel'] if channel is None else channel
        ts = self.data['ts'] if timestamp is None else timestamp
        return self.bot.add_reaction(emoji, target, ts)

    def remove_reaction(self, emoji, channel=None, timestamp=None):
        """Detach *emoji* from the message this event refers to; returns the
        bot's result."""
        target = self.data['channel'] if channel is None else channel
        ts = self.data['ts'] if timestamp is None else timestamp
        return self.bot.remove_reaction(emoji, target, ts)
|
Add missing return of Event methods
|
Add missing return of Event methods
|
Python
|
mit
|
rokurosatp/slackbotpry
|
class Event:
    """Wraps a Slack event payload; delegates messaging calls to the bot.

    Channel and timestamp arguments default to the values carried by the
    event payload ('channel' and 'ts' keys), so handlers can act on the
    triggering message without restating them.
    """
    def __init__(self, bot, data):
        self.bot = bot    # bot client exposing the messaging API
        self.data = data  # raw event payload dict
    def post_message(self, text, channel=None):
        # Default to the channel the event arrived on.
        if channel is None:
            channel = self.data['channel']
        # NOTE(review): the bot's return value is discarded — callers cannot
        # observe the API response; confirm this is intentional.
        self.bot.post_message(text, channel)
    def add_reaction(self, emoji, channel=None, timestamp=None):
        # Default to the message this event refers to.
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        self.bot.add_reaction(emoji, channel, timestamp)
    def remove_reaction(self, emoji, channel=None, timestamp=None):
        # Default to the message this event refers to.
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        self.bot.remove_reaction(emoji, channel, timestamp)
Add missing return of Event methods
|
class Event:
    """Wraps a Slack event payload; delegates messaging calls to the bot.

    Channel and timestamp arguments default to the values carried by the
    event payload ('channel' and 'ts' keys). Each helper returns whatever
    the underlying bot call returns.
    """
    def __init__(self, bot, data):
        self.bot = bot    # bot client exposing the messaging API
        self.data = data  # raw event payload dict
    def post_message(self, text, channel=None):
        # Default to the channel the event arrived on.
        if channel is None:
            channel = self.data['channel']
        return self.bot.post_message(text, channel)
    def add_reaction(self, emoji, channel=None, timestamp=None):
        # Default to the message this event refers to.
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        return self.bot.add_reaction(emoji, channel, timestamp)
    def remove_reaction(self, emoji, channel=None, timestamp=None):
        # Default to the message this event refers to.
        if channel is None:
            channel = self.data['channel']
        if timestamp is None:
            timestamp = self.data['ts']
        return self.bot.remove_reaction(emoji, channel, timestamp)
|
<commit_before>class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.remove_reaction(emoji, channel, timestamp)
<commit_msg>Add missing return of Event methods<commit_after>
|
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
|
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.remove_reaction(emoji, channel, timestamp)
Add missing return of Event methodsclass Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
|
<commit_before>class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.remove_reaction(emoji, channel, timestamp)
<commit_msg>Add missing return of Event methods<commit_after>class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
|
9bb7dc9c8f7b5208c332017df8b1501315e2601f
|
py/gaarf/utils.py
|
py/gaarf/utils.py
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
    """Expand *customer_id* into the ids of its non-manager (leaf) accounts.

    Args:
        ads_client: initialized Google Ads API client.
        customer_id: seed (e.g. manager/MCC) account id to expand.

    Returns:
        The fetched report, one `customer_client.id` row per leaf account.
    """
    query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
    query_specification = QuerySpecification(query).generate()
    # AdsReportFetcher takes the customer id at construction time;
    # fetch() accepts only the query specification. The original passed
    # customer_id to fetch(), which does not match the fetcher's signature.
    report_fetcher = AdsReportFetcher(ads_client, customer_id)
    return report_fetcher.fetch(query_specification)
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
    """Expand *customer_id* into the ids of its non-manager (leaf) accounts.

    Args:
        ads_client: initialized Google Ads API client.
        customer_id: seed (e.g. manager/MCC) account id to expand.

    Returns:
        The fetched report, one `customer_client.id` row per leaf account.
    """
    query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
    query_specification = QuerySpecification(query).generate()
    # The fetcher is bound to customer_id at construction; fetch() takes
    # only the query specification.
    report_fetcher = AdsReportFetcher(ads_client, customer_id)
    return report_fetcher.fetch(query_specification)
|
Fix incorrect signature for get_customer_ids function
|
Fix incorrect signature for get_customer_ids function
Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547
|
Python
|
apache-2.0
|
google/ads-api-report-fetcher,google/ads-api-report-fetcher,google/ads-api-report-fetcher,google/ads-api-report-fetcher
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client)
return report_fetcher.fetch(query_specification, customer_id)
Fix incorrect signature for get_customer_ids function
Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client, customer_id)
return report_fetcher.fetch(query_specification)
|
<commit_before># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client)
return report_fetcher.fetch(query_specification, customer_id)
<commit_msg>Fix incorrect signature for get_customer_ids function
Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547<commit_after>
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client, customer_id)
return report_fetcher.fetch(query_specification)
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client)
return report_fetcher.fetch(query_specification, customer_id)
Fix incorrect signature for get_customer_ids function
Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client, customer_id)
return report_fetcher.fetch(query_specification)
|
<commit_before># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client)
return report_fetcher.fetch(query_specification, customer_id)
<commit_msg>Fix incorrect signature for get_customer_ids function
Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547<commit_after># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.import proto
from .query_editor import QuerySpecification
from .query_executor import AdsReportFetcher
def get_customer_ids(ads_client, customer_id):
query = """
SELECT customer_client.id FROM customer_client
WHERE customer_client.manager = FALSE
"""
query_specification = QuerySpecification(query).generate()
report_fetcher = AdsReportFetcher(ads_client, customer_id)
return report_fetcher.fetch(query_specification)
|
8a7b6962a26de7035d64dce23285960c78678a2a
|
server/resources.py
|
server/resources.py
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help="Text content of comment")
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
Use double quotes on help string
|
Use double quotes on help string
|
Python
|
mit
|
MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
Use double quotes on help string
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help="Text content of comment")
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
<commit_before>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
<commit_msg>Use double quotes on help string<commit_after>
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help="Text content of comment")
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
Use double quotes on help stringfrom flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help="Text content of comment")
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
<commit_before>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
<commit_msg>Use double quotes on help string<commit_after>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help="Text content of comment")
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
e457f09f280bc86bc7b5cdcfb4fa3ebf093402ec
|
services/dropbox.py
|
services/dropbox.py
|
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
|
import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
Upgrade Dropbox to OAuth 2
|
Upgrade Dropbox to OAuth 2
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
Upgrade Dropbox to OAuth 2
|
import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
<commit_before>import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
<commit_msg>Upgrade Dropbox to OAuth 2<commit_after>
|
import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
Upgrade Dropbox to OAuth 2import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
<commit_before>import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
<commit_msg>Upgrade Dropbox to OAuth 2<commit_after>import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
b39dcbd12164cdd682aea2d39e298fe968dcf38e
|
pkg_resources/py31compat.py
|
pkg_resources/py31compat.py
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info <= (3, 2, 6) or
(3, 3) <= sys.version_info <= (3, 3, 5) or
(3, 4) <= sys.version_info <= (3, 4, 1)
)
makedirs = os.makedirs if needs_makedirs else _makedirs_31
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 6) or
(3, 3) <= sys.version_info < (3, 3, 5) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
Correct bounds and boolean selector.
|
Correct bounds and boolean selector.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info <= (3, 2, 6) or
(3, 3) <= sys.version_info <= (3, 3, 5) or
(3, 4) <= sys.version_info <= (3, 4, 1)
)
makedirs = os.makedirs if needs_makedirs else _makedirs_31
Correct bounds and boolean selector.
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 6) or
(3, 3) <= sys.version_info < (3, 3, 5) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
<commit_before>import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info <= (3, 2, 6) or
(3, 3) <= sys.version_info <= (3, 3, 5) or
(3, 4) <= sys.version_info <= (3, 4, 1)
)
makedirs = os.makedirs if needs_makedirs else _makedirs_31
<commit_msg>Correct bounds and boolean selector.<commit_after>
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 6) or
(3, 3) <= sys.version_info < (3, 3, 5) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info <= (3, 2, 6) or
(3, 3) <= sys.version_info <= (3, 3, 5) or
(3, 4) <= sys.version_info <= (3, 4, 1)
)
makedirs = os.makedirs if needs_makedirs else _makedirs_31
Correct bounds and boolean selector.import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 6) or
(3, 3) <= sys.version_info < (3, 3, 5) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
<commit_before>import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info <= (3, 2, 6) or
(3, 3) <= sys.version_info <= (3, 3, 5) or
(3, 4) <= sys.version_info <= (3, 4, 1)
)
makedirs = os.makedirs if needs_makedirs else _makedirs_31
<commit_msg>Correct bounds and boolean selector.<commit_after>import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 6) or
(3, 3) <= sys.version_info < (3, 3, 5) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
0ee2b337b61155044a66ae1f6f173492a51c1150
|
dipsim/fluorophore.py
|
dipsim/fluorophore.py
|
import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
|
import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), it distribution (using a kappa watson distribution), and a
constant (c) proportional to the fluorohphore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
|
Modify Fluorophore for more convenient coordinates.
|
Modify Fluorophore for more convenient coordinates.
|
Python
|
mit
|
talonchandler/dipsim,talonchandler/dipsim
|
import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
Modify Fluorophore for more convenient coordinates.
|
import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), it distribution (using a kappa watson distribution), and a
constant (c) proportional to the fluorohphore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
|
<commit_before>import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
<commit_msg>Modify Fluorophore for more convenient coordinates.<commit_after>
|
import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), it distribution (using a kappa watson distribution), and a
constant (c) proportional to the fluorohphore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
|
import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
Modify Fluorophore for more convenient coordinates.import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), it distribution (using a kappa watson distribution), and a
constant (c) proportional to the fluorohphore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
|
<commit_before>import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
<commit_msg>Modify Fluorophore for more convenient coordinates.<commit_after>import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), it distribution (using a kappa watson distribution), and a
constant (c) proportional to the fluorohphore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
|
79e5cab2908c26ff80ae5c5e4b37ced9765a952c
|
dbaas/physical/forms/database_infra.py
|
dbaas/physical/forms/database_infra.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args and 'disk_offering' in args[0]:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
Fix database infra save method
|
Fix database infra save method
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
Fix database infra save method
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args and 'disk_offering' in args[0]:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
<commit_msg>Fix database infra save method<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args and 'disk_offering' in args[0]:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
Fix database infra save method# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args and 'disk_offering' in args[0]:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
<commit_msg>Fix database infra save method<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from .. import models
log = logging.getLogger(__name__)
class DatabaseInfraForm(forms.ModelForm):
class Meta:
model = models.DatabaseInfra
def __init__(self, *args, **kwargs):
if args and 'disk_offering' in args[0]:
disk_offering = args[0]['disk_offering']
plan_id = args[0]['plan']
if not disk_offering and plan_id:
plan = models.Plan.objects.get(id=plan_id)
if plan.disk_offering:
args[0]['disk_offering'] = plan.disk_offering.id
super(DatabaseInfraForm, self).__init__(*args, **kwargs)
|
1e264b61d82b009778780926ca730b5dc990a635
|
scikits/image/analysis/__init__.py
|
scikits/image/analysis/__init__.py
|
from spath import shortest_path
|
try:
from spath import shortest_path
except ImportError:
print """*** The shortest path extension has not been compiled. Run
python setup.py build_ext -i
in the source directory to build in-place. Please refer to INSTALL.txt
for further detail."""
|
Allow importing even when not compiled.
|
spath: Allow importing even when not compiled.
|
Python
|
bsd-3-clause
|
youprofit/scikit-image,bsipocz/scikit-image,bennlich/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,bennlich/scikit-image,paalge/scikit-image,chintak/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,rjeli/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,Midafi/scikit-image,youprofit/scikit-image,robintw/scikit-image,WarrenWeckesser/scikits-image,SamHames/scikit-image,GaelVaroquaux/scikits.image,keflavich/scikit-image,emmanuelle/scikits.image,warmspringwinds/scikit-image,SamHames/scikit-image,chintak/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,juliusbierk/scikit-image,GaZ3ll3/scikit-image,oew1v07/scikit-image,robintw/scikit-image,chriscrosscutler/scikit-image,michaelaye/scikit-image,chintak/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,blink1073/scikit-image,Britefury/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,emmanuelle/scikits.image,newville/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,ClinicalGraphics/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,bsipocz/scikit-image,dpshelio/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,michaelpacer/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,rjeli/scikit-image,GaelVaroquaux/scikits.image,keflavich/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,emmanuelle/scikits.image,chriscrosscutler/scikit-image,blink1073/scikit-image,chintak/scikit-image,juliusbierk/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,Hiyorimi/scikit-image,jwiggins/scikit-image,Midafi/scikit-image
|
from spath import shortest_path
spath: Allow importing even when not compiled.
|
try:
from spath import shortest_path
except ImportError:
print """*** The shortest path extension has not been compiled. Run
python setup.py build_ext -i
in the source directory to build in-place. Please refer to INSTALL.txt
for further detail."""
|
<commit_before>from spath import shortest_path
<commit_msg>spath: Allow importing even when not compiled.<commit_after>
|
try:
from spath import shortest_path
except ImportError:
print """*** The shortest path extension has not been compiled. Run
python setup.py build_ext -i
in the source directory to build in-place. Please refer to INSTALL.txt
for further detail."""
|
from spath import shortest_path
spath: Allow importing even when not compiled.try:
from spath import shortest_path
except ImportError:
print """*** The shortest path extension has not been compiled. Run
python setup.py build_ext -i
in the source directory to build in-place. Please refer to INSTALL.txt
for further detail."""
|
<commit_before>from spath import shortest_path
<commit_msg>spath: Allow importing even when not compiled.<commit_after>try:
from spath import shortest_path
except ImportError:
print """*** The shortest path extension has not been compiled. Run
python setup.py build_ext -i
in the source directory to build in-place. Please refer to INSTALL.txt
for further detail."""
|
ab20fb46cf1afb4b59d40a7bd8aba6a29cdebb64
|
eris/pydoc_color.py
|
eris/pydoc_color.py
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
Make pydoc quieter on error.
|
tools: Make pydoc quieter on error.
|
Python
|
artistic-2.0
|
ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
tools: Make pydoc quieter on error.
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
<commit_msg>tools: Make pydoc quieter on error.<commit_after>
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
tools: Make pydoc quieter on error.#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
<commit_msg>tools: Make pydoc quieter on error.<commit_after>#!/usr/bin/env python3.7
# Copyright (C) 2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
3144fac7e9bc938f9eabc3f90fac6acdbaa89ab1
|
pollirio/reactors/markov.py
|
pollirio/reactors/markov.py
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
hasPrev = False
for line in lines:
for curword in line.split():
if curword != '':
curword = curword.lower()
if hasPrev == False:
prevword = currWord
hasPrev = True
else:
markov_chain.setdefault(prevWord, []).append(currWord)
prevWord = currWord
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
has_prev = False
for line in lines:
for cur_word in line.split():
if cur_word != '':
cur_word = cur_word.lower()
if has_prev == False:
prev_word = cur_word
has_prev = True
else:
markov_chain.setdefault(prev_word, []).append(cur_word)
prev_word = cur_word
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
Fix bug in chain creation
|
Fix bug in chain creation
|
Python
|
mit
|
dpaleino/pollirio,dpaleino/pollirio
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
hasPrev = False
for line in lines:
for curword in line.split():
if curword != '':
curword = curword.lower()
if hasPrev == False:
prevword = currWord
hasPrev = True
else:
markov_chain.setdefault(prevWord, []).append(currWord)
prevWord = currWord
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
Fix bug in chain creation
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
has_prev = False
for line in lines:
for cur_word in line.split():
if cur_word != '':
cur_word = cur_word.lower()
if has_prev == False:
prev_word = cur_word
has_prev = True
else:
markov_chain.setdefault(prev_word, []).append(cur_word)
prev_word = cur_word
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
<commit_before># -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
hasPrev = False
for line in lines:
for curword in line.split():
if curword != '':
curword = curword.lower()
if hasPrev == False:
prevword = currWord
hasPrev = True
else:
markov_chain.setdefault(prevWord, []).append(currWord)
prevWord = currWord
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
<commit_msg>Fix bug in chain creation<commit_after>
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
has_prev = False
for line in lines:
for cur_word in line.split():
if cur_word != '':
cur_word = cur_word.lower()
if has_prev == False:
prev_word = cur_word
has_prev = True
else:
markov_chain.setdefault(prev_word, []).append(cur_word)
prev_word = cur_word
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
hasPrev = False
for line in lines:
for curword in line.split():
if curword != '':
curword = curword.lower()
if hasPrev == False:
prevword = currWord
hasPrev = True
else:
markov_chain.setdefault(prevWord, []).append(currWord)
prevWord = currWord
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
Fix bug in chain creation# -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
has_prev = False
for line in lines:
for cur_word in line.split():
if cur_word != '':
cur_word = cur_word.lower()
if has_prev == False:
prev_word = cur_word
has_prev = True
else:
markov_chain.setdefault(prev_word, []).append(cur_word)
prev_word = cur_word
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
<commit_before># -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
hasPrev = False
for line in lines:
for curword in line.split():
if curword != '':
curword = curword.lower()
if hasPrev == False:
prevword = currWord
hasPrev = True
else:
markov_chain.setdefault(prevWord, []).append(currWord)
prevWord = currWord
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
<commit_msg>Fix bug in chain creation<commit_after># -*- coding: utf-8 -*-
from pollirio.reactors import expose
from pollirio import conf, choose_dest
import random
def create_chains(lines):
markov_chain = {}
has_prev = False
for line in lines:
for cur_word in line.split():
if cur_word != '':
cur_word = cur_word.lower()
if has_prev == False:
prev_word = cur_word
has_prev = True
else:
markov_chain.setdefault(prev_word, []).append(cur_word)
prev_word = cur_word
return markov_chain
def make_sentence(markov_chain, words=None):
while True:
if not words:
word = random.choice(markov_chain.keys())
else:
word = random.choice(words)
if word[-1] not in ('.','?'):
break
generated_sentence = word.capitalize()
while word[-1] not in ('.','?'):
newword = random.choice(markov_chain[word])
generated_sentence += ' '+newword
word = newword #TODO fix possible crash if this is not a key (last word parsed)
return generated_sentence
@expose('^%s' % conf.nickname)
def talk(bot, ievent):
source = 'data/pg28867.txt'
mc = create_chains(open(source))
bot.msg(choose_dest(ievent), '%s: %s' % (ievent.nick, make_sentence(mc)))
|
e9bfe96cb3463fe99f08305aab44bd3d7556825a
|
api/radar_api/serializers/group_users.py
|
api/radar_api/serializers/group_users.py
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
Add managed roles to serializer
|
Add managed roles to serializer
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
Add managed roles to serializer
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
<commit_before>from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
<commit_msg>Add managed roles to serializer<commit_after>
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
Add managed roles to serializerfrom radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
<commit_before>from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
<commit_msg>Add managed roles to serializer<commit_after>from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
8145f922aa5569f667252453c8509d9e116745f6
|
flake8diff/utils.py
|
flake8diff/utils.py
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False, log_errors=True):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
if log_errors:
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
Add log_errors flag to _execute
|
Add log_errors flag to _execute
|
Python
|
mit
|
miki725/flake8-diff,dealertrack/flake8-diff
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
Add log_errors flag to _execute
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False, log_errors=True):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
if log_errors:
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
<commit_before>from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
<commit_msg>Add log_errors flag to _execute<commit_after>
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False, log_errors=True):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
if log_errors:
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
Add log_errors flag to _executefrom __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False, log_errors=True):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
if log_errors:
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
<commit_before>from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
<commit_msg>Add log_errors flag to _execute<commit_after>from __future__ import unicode_literals, print_function
import logging
import subprocess
logger = logging.getLogger(__name__)
def _execute(cmd, strict=False, log_errors=True):
"""
Make executing a command locally a little less painful
"""
logger.debug("executing {0}".format(cmd))
process = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate()
return_code = process.wait()
# flake8 by default returns non-zero
# status code when any violations have been found
# so only log if error message is present
if return_code != 0 and (err or strict):
if log_errors:
logger.error(err)
if strict:
raise subprocess.CalledProcessError(return_code, cmd)
return out.decode('utf-8')
|
f01841e5b3fb9fe6a4f30b15dbf12146971d1b6f
|
flask_aggregator.py
|
flask_aggregator.py
|
import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
import json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
Use app request context directly rather than hacking with a test client
|
Use app request context directly rather than hacking with a test client
|
Python
|
mit
|
ramnes/flask-aggregator
|
import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
Use app request context directly rather than hacking with a test client
|
import json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
<commit_before>import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
<commit_msg>Use app request context directly rather than hacking with a test client<commit_after>
|
import json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
Use app request context directly rather than hacking with a test clientimport json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
<commit_before>import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
<commit_msg>Use app request context directly rather than hacking with a test client<commit_after>import json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|
69baf68b436255eca71ec63578a2fdef4bc03165
|
books.py
|
books.py
|
import falcon
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
|
import falcon
def get_paragraphs(pathname):
result = []
with open(pathname) as f:
for line in f.readlines():
if line != '\n':
result.append(line[:-1])
return result
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
if __name__ == '__main__':
paragraphs = get_paragraphs('/home/sanchopanca/Documents/thunder.txt')
print(paragraphs)
|
Add function which divide text to paragraphs
|
Add function which divide text to paragraphs
|
Python
|
agpl-3.0
|
sanchopanca/reader,sanchopanca/reader
|
import falcon
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
Add function which divide text to paragraphs
|
import falcon
def get_paragraphs(pathname):
result = []
with open(pathname) as f:
for line in f.readlines():
if line != '\n':
result.append(line[:-1])
return result
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
if __name__ == '__main__':
paragraphs = get_paragraphs('/home/sanchopanca/Documents/thunder.txt')
print(paragraphs)
|
<commit_before>import falcon
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
<commit_msg>Add function which divide text to paragraphs<commit_after>
|
import falcon
def get_paragraphs(pathname):
result = []
with open(pathname) as f:
for line in f.readlines():
if line != '\n':
result.append(line[:-1])
return result
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
if __name__ == '__main__':
paragraphs = get_paragraphs('/home/sanchopanca/Documents/thunder.txt')
print(paragraphs)
|
import falcon
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
Add function which divide text to paragraphsimport falcon
def get_paragraphs(pathname):
result = []
with open(pathname) as f:
for line in f.readlines():
if line != '\n':
result.append(line[:-1])
return result
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
if __name__ == '__main__':
paragraphs = get_paragraphs('/home/sanchopanca/Documents/thunder.txt')
print(paragraphs)
|
<commit_before>import falcon
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
<commit_msg>Add function which divide text to paragraphs<commit_after>import falcon
def get_paragraphs(pathname):
result = []
with open(pathname) as f:
for line in f.readlines():
if line != '\n':
result.append(line[:-1])
return result
class BooksResource:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = open('/home/sanchopanca/Documents/thunder.txt').read()
app = falcon.API()
books = BooksResource()
app.add_route('/books', books)
if __name__ == '__main__':
paragraphs = get_paragraphs('/home/sanchopanca/Documents/thunder.txt')
print(paragraphs)
|
5c8780c1f4ba914f20f0dc022cc26becb381f2f1
|
markymark/fields.py
|
markymark/fields.py
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
Revert "Allow widget overwriting on form field"
|
Revert "Allow widget overwriting on form field"
This reverts commit 23a9aaae78cc4d9228f8d0705647fbcadcaf7975.
|
Python
|
mit
|
moccu/django-markymark,moccu/django-markymark,moccu/django-markymark
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
Revert "Allow widget overwriting on form field"
This reverts commit 23a9aaae78cc4d9228f8d0705647fbcadcaf7975.
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
<commit_before>from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
<commit_msg>Revert "Allow widget overwriting on form field"
This reverts commit 23a9aaae78cc4d9228f8d0705647fbcadcaf7975.<commit_after>
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
Revert "Allow widget overwriting on form field"
This reverts commit 23a9aaae78cc4d9228f8d0705647fbcadcaf7975.from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
<commit_before>from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
<commit_msg>Revert "Allow widget overwriting on form field"
This reverts commit 23a9aaae78cc4d9228f8d0705647fbcadcaf7975.<commit_after>from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
|
3330678d6474a876e2d18edce995bd82ba027472
|
gittools.py
|
gittools.py
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
Handle symbolic refs in .git/HEAD
|
OTHER: Handle symbolic refs in .git/HEAD
|
Python
|
lgpl-2.1
|
xmms2/xmms2-stable,dreamerc/xmms2,oneman/xmms2-oneman,xmms2/xmms2-stable,chrippa/xmms2,theefer/xmms2,oneman/xmms2-oneman-old,six600110/xmms2,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,chrippa/xmms2,theeternalsw0rd/xmms2,krad-radio/xmms2-krad,theeternalsw0rd/xmms2,oneman/xmms2-oneman,oneman/xmms2-oneman,six600110/xmms2,oneman/xmms2-oneman-old,krad-radio/xmms2-krad,theefer/xmms2,theefer/xmms2,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,oneman/xmms2-oneman,dreamerc/xmms2,xmms2/xmms2-stable,dreamerc/xmms2,chrippa/xmms2,theefer/xmms2,six600110/xmms2,oneman/xmms2-oneman,mantaraya36/xmms2-mantaraya36,theefer/xmms2,oneman/xmms2-oneman-old,chrippa/xmms2,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36,dreamerc/xmms2,theeternalsw0rd/xmms2,theefer/xmms2,chrippa/xmms2,krad-radio/xmms2-krad,oneman/xmms2-oneman,dreamerc/xmms2,six600110/xmms2,six600110/xmms2,mantaraya36/xmms2-mantaraya36,oneman/xmms2-oneman,theeternalsw0rd/xmms2,xmms2/xmms2-stable,oneman/xmms2-oneman-old,xmms2/xmms2-stable,six600110/xmms2,oneman/xmms2-oneman-old,krad-radio/xmms2-krad,krad-radio/xmms2-krad,krad-radio/xmms2-krad,chrippa/xmms2,theefer/xmms2,theeternalsw0rd/xmms2
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
OTHER: Handle symbolic refs in .git/HEAD
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
<commit_before>import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
<commit_msg>OTHER: Handle symbolic refs in .git/HEAD<commit_after>
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
OTHER: Handle symbolic refs in .git/HEADimport os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
<commit_before>import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
<commit_msg>OTHER: Handle symbolic refs in .git/HEAD<commit_after>import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
03671a01cb5ea359c22e954a8381bbfd30bce094
|
lc560_subarray_sum_equals_k.py
|
lc560_subarray_sum_equals_k.py
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
Complete naive solution by nested for loops
|
Complete naive solution by nested for loops
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
Complete naive solution by nested for loops
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
<commit_before>"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
<commit_msg>Complete naive solution by nested for loops<commit_after>
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
Complete naive solution by nested for loops"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
<commit_before>"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
<commit_msg>Complete naive solution by nested for loops<commit_after>"""560. Subarray Sum Equals K
Medium
Given an array of integers and an integer k, you need to find the total
number of continuous subarrays whose sum equals to k.
Example 1:
Input: nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000] and the range of the
integer k is [-1e7, 1e7].
"""
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
7bcd595429c18a38dfcb81e39cbf793dc969136f
|
mongoforms/utils.py
|
mongoforms/utils.py
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name, field in meta.document._fields.iteritems():
# skip excluded fields
if field_name not in meta_exclude:
yield (field_name, field)
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name in meta.document._fields_ordered:
# skip excluded fields
if field_name not in meta_exclude:
field = meta.document._fields.get(field_name)
yield (field_name, field)
|
Order MongoForm fields according to the Document
|
Order MongoForm fields according to the Document
|
Python
|
bsd-3-clause
|
pimentech/django-mongoforms,pimentech/django-mongoforms
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name, field in meta.document._fields.iteritems():
# skip excluded fields
if field_name not in meta_exclude:
yield (field_name, field)
Order MongoForm fields according to the Document
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name in meta.document._fields_ordered:
# skip excluded fields
if field_name not in meta_exclude:
field = meta.document._fields.get(field_name)
yield (field_name, field)
|
<commit_before>from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name, field in meta.document._fields.iteritems():
# skip excluded fields
if field_name not in meta_exclude:
yield (field_name, field)
<commit_msg>Order MongoForm fields according to the Document<commit_after>
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name in meta.document._fields_ordered:
# skip excluded fields
if field_name not in meta_exclude:
field = meta.document._fields.get(field_name)
yield (field_name, field)
|
from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name, field in meta.document._fields.iteritems():
# skip excluded fields
if field_name not in meta_exclude:
yield (field_name, field)
Order MongoForm fields according to the Documentfrom django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name in meta.document._fields_ordered:
# skip excluded fields
if field_name not in meta_exclude:
field = meta.document._fields.get(field_name)
yield (field_name, field)
|
<commit_before>from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name, field in meta.document._fields.iteritems():
# skip excluded fields
if field_name not in meta_exclude:
yield (field_name, field)
<commit_msg>Order MongoForm fields according to the Document<commit_after>from django import forms
from mongoengine.base import ValidationError
from bson.objectid import ObjectId
def mongoengine_validate_wrapper(old_clean, new_clean):
"""
A wrapper function to validate formdata against mongoengine-field
validator and raise a proper django.forms ValidationError if there
are any problems.
"""
def inner_validate(value):
value = old_clean(value)
try:
new_clean(value)
return value
except ValidationError, e:
raise forms.ValidationError(e)
return inner_validate
def iter_valid_fields(meta):
"""walk through the available valid fields.."""
# fetch field configuration and always add the id_field as exclude
meta_fields = getattr(meta, 'fields', ())
meta_exclude = getattr(meta, 'exclude', ())
id_field = meta.document._meta.get('id_field', 'id')
if type(meta.document._fields.get(id_field)) == ObjectId:
meta_exclude += (meta.document._meta.get(id_field),)
# walk through meta_fields or through the document fields to keep
# meta_fields order in the form
if meta_fields:
for field_name in meta_fields:
field = meta.document._fields.get(field_name)
if field:
yield (field_name, field)
else:
for field_name in meta.document._fields_ordered:
# skip excluded fields
if field_name not in meta_exclude:
field = meta.document._fields.get(field_name)
yield (field_name, field)
|
8ae391d738e3982c9d52b970a653f0b67724dce5
|
integration/main.py
|
integration/main.py
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
"""
# TODO: Only really makes sense at the task level though...
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
Tweak old integration test docstring
|
Tweak old integration test docstring
|
Python
|
bsd-2-clause
|
fabric/fabric
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
Tweak old integration test docstring
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
"""
# TODO: Only really makes sense at the task level though...
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
<commit_before>from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
<commit_msg>Tweak old integration test docstring<commit_after>
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
"""
# TODO: Only really makes sense at the task level though...
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
Tweak old integration test docstringfrom spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
"""
# TODO: Only really makes sense at the task level though...
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
<commit_before>from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
<commit_msg>Tweak old integration test docstring<commit_after>from spec import skip, Spec, ok_
from fabric.connection import Connection
class Main(Spec):
def connection_open_generates_real_connection(self):
c = Connection('localhost')
c.open()
ok_(c.client.get_transport().active)
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
"""
# TODO: Only really makes sense at the task level though...
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
434f6e7b920d50d08d2cd139b479d5017184a44a
|
packages/Python/lldbsuite/test/lang/swift/foundation_value_types/data/TestSwiftFoundationTypeData.py
|
packages/Python/lldbsuite/test/lang/swift/foundation_value_types/data/TestSwiftFoundationTypeData.py
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
# https://bugs.swift.org/browse/SR-3320
# This test fails with an assertion error with stdlib resilience enabled:
# https://github.com/apple/swift/pull/13573
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipIf])
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
Revert "Skip an x-failed test due to an unexpected assert"
|
Revert "Skip an x-failed test due to an unexpected assert"
This reverts commit b04c3edb7a8bcb5265a1ea4265714dcb8d1b185a.
(cherry picked from commit 76be04b7474d9a12d45256c3f31719e3b2ac425d)
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
# https://bugs.swift.org/browse/SR-3320
# This test fails with an assertion error with stdlib resilience enabled:
# https://github.com/apple/swift/pull/13573
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipIf])
Revert "Skip an x-failed test due to an unexpected assert"
This reverts commit b04c3edb7a8bcb5265a1ea4265714dcb8d1b185a.
(cherry picked from commit 76be04b7474d9a12d45256c3f31719e3b2ac425d)
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
<commit_before># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
# https://bugs.swift.org/browse/SR-3320
# This test fails with an assertion error with stdlib resilience enabled:
# https://github.com/apple/swift/pull/13573
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipIf])
<commit_msg>Revert "Skip an x-failed test due to an unexpected assert"
This reverts commit b04c3edb7a8bcb5265a1ea4265714dcb8d1b185a.
(cherry picked from commit 76be04b7474d9a12d45256c3f31719e3b2ac425d)<commit_after>
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
# https://bugs.swift.org/browse/SR-3320
# This test fails with an assertion error with stdlib resilience enabled:
# https://github.com/apple/swift/pull/13573
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipIf])
Revert "Skip an x-failed test due to an unexpected assert"
This reverts commit b04c3edb7a8bcb5265a1ea4265714dcb8d1b185a.
(cherry picked from commit 76be04b7474d9a12d45256c3f31719e3b2ac425d)# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
<commit_before># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
# https://bugs.swift.org/browse/SR-3320
# This test fails with an assertion error with stdlib resilience enabled:
# https://github.com/apple/swift/pull/13573
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipIf])
<commit_msg>Revert "Skip an x-failed test due to an unexpected assert"
This reverts commit b04c3edb7a8bcb5265a1ea4265714dcb8d1b185a.
(cherry picked from commit 76be04b7474d9a12d45256c3f31719e3b2ac425d)<commit_after># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
1510bc44b8017771e60618d617cfefd4eaf32cde
|
addie/initialization/init_step1.py
|
addie/initialization/init_step1.py
|
from PyQt4 import QtGui
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QtGui.QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
from qtpy.QtWidgets import (QLabel)
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
Convert from pyqt4 to qtpy
|
Convert from pyqt4 to qtpy
|
Python
|
mit
|
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
|
from PyQt4 import QtGui
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QtGui.QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)Convert from pyqt4 to qtpy
|
from qtpy.QtWidgets import (QLabel)
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
<commit_before>from PyQt4 import QtGui
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QtGui.QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)<commit_msg>Convert from pyqt4 to qtpy<commit_after>
|
from qtpy.QtWidgets import (QLabel)
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
from PyQt4 import QtGui
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QtGui.QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)Convert from pyqt4 to qtpyfrom qtpy.QtWidgets import (QLabel)
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
<commit_before>from PyQt4 import QtGui
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QtGui.QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)<commit_msg>Convert from pyqt4 to qtpy<commit_after>from qtpy.QtWidgets import (QLabel)
from addie.step1_handler.step1_gui_handler import Step1GuiHandler
class InitStep1(object):
def __init__(self, parent=None):
self.parent = parent
self.parent.ui.diamond.setFocus(True)
self.set_statusBar()
self.set_title()
def set_title(self):
o_gui = Step1GuiHandler(parent = self.parent)
o_gui.set_main_window_title()
def set_statusBar(self):
status_bar_label = QLabel()
self.parent.ui.statusbar.addPermanentWidget(status_bar_label)
|
eef348f74a13f42780713aa9dfca9cc617fa52c8
|
neutron/callbacks/resources.py
|
neutron/callbacks/resources.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floating_ip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floatingip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
Make the value of FLOATING_IP match with api resource
|
callbacks: Make the value of FLOATING_IP match with api resource
Note: BEFORE_RESPONSE code assumes they match. Nova notifier uses
FLOATING_IP/BEFORE_RESPONSE.
Closes-Bug: #1642918
Change-Id: If834ca1ee52d538cae4a5d164e0e0343c7019546
|
Python
|
apache-2.0
|
openstack/neutron,mahak/neutron,huntxu/neutron,noironetworks/neutron,cloudbase/neutron,eayunstack/neutron,openstack/neutron,mahak/neutron,openstack/neutron,eayunstack/neutron,huntxu/neutron,mahak/neutron,noironetworks/neutron,cloudbase/neutron
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floating_ip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
callbacks: Make the value of FLOATING_IP match with api resource
Note: BEFORE_RESPONSE code assumes they match. Nova notifier uses
FLOATING_IP/BEFORE_RESPONSE.
Closes-Bug: #1642918
Change-Id: If834ca1ee52d538cae4a5d164e0e0343c7019546
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floatingip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floating_ip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
<commit_msg>callbacks: Make the value of FLOATING_IP match with api resource
Note: BEFORE_RESPONSE code assumes they match. Nova notifier uses
FLOATING_IP/BEFORE_RESPONSE.
Closes-Bug: #1642918
Change-Id: If834ca1ee52d538cae4a5d164e0e0343c7019546<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floatingip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floating_ip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
callbacks: Make the value of FLOATING_IP match with api resource
Note: BEFORE_RESPONSE code assumes they match. Nova notifier uses
FLOATING_IP/BEFORE_RESPONSE.
Closes-Bug: #1642918
Change-Id: If834ca1ee52d538cae4a5d164e0e0343c7019546# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floatingip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floating_ip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
<commit_msg>callbacks: Make the value of FLOATING_IP match with api resource
Note: BEFORE_RESPONSE code assumes they match. Nova notifier uses
FLOATING_IP/BEFORE_RESPONSE.
Closes-Bug: #1642918
Change-Id: If834ca1ee52d538cae4a5d164e0e0343c7019546<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
AGENT = 'agent'
EXTERNAL_NETWORK = 'external_network'
FLOATING_IP = 'floatingip'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
ROUTER = 'router'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNET_GATEWAY = 'subnet_gateway'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
|
a373234ad9ce2c0e5b2c9917e50a2b9d97293674
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.10'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
Update dsub version to 0.3.11.dev0
|
Update dsub version to 0.3.11.dev0
PiperOrigin-RevId: 324910070
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.10'
Update dsub version to 0.3.11.dev0
PiperOrigin-RevId: 324910070
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.10'
<commit_msg>Update dsub version to 0.3.11.dev0
PiperOrigin-RevId: 324910070<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.10'
Update dsub version to 0.3.11.dev0
PiperOrigin-RevId: 324910070# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.10'
<commit_msg>Update dsub version to 0.3.11.dev0
PiperOrigin-RevId: 324910070<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
2aa41403436a1629d3d0f8c83b51f685f7b0f421
|
main/remote_exe.py
|
main/remote_exe.py
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.getcwd(),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.path.dirname(__file__),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
Fix bug: log dir path: set it thru file location, not current working path
|
Fix bug: log dir path: set it thru file location, not current working path
|
Python
|
mit
|
trelay/multi-executor
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.getcwd(),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
Fix bug: log dir path: set it thru file location, not current working path
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.path.dirname(__file__),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
<commit_before>#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.getcwd(),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
<commit_msg>Fix bug: log dir path: set it thru file location, not current working path<commit_after>
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.path.dirname(__file__),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.getcwd(),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
Fix bug: log dir path: set it thru file location, not current working path#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.path.dirname(__file__),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
<commit_before>#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.getcwd(),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
<commit_msg>Fix bug: log dir path: set it thru file location, not current working path<commit_after>#!/usr/bin/python
import shlex, os
from subprocess import Popen, PIPE
#from time import sleep
import threading
def exe_cmd(log_name, command_line):
args = shlex.split(command_line)
log_dir=os.path.join(os.path.dirname(__file__),"..", "log")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
file_name= os.path.join(log_dir, log_name)
f_d=open(file_name,"w+")
p = Popen(args, stdout=f_d, stdin=PIPE, stderr=f_d)
#Dump stdout and stderr to log file
output = p.communicate()
print command_line+ " " + "executed!"
f_d.close()
def create_thread(argv_list):
io_thread=[]
for argv in argv_list:
thread=threading.Thread(target=exe_cmd,kwargs =argv)
io_thread.append(thread)
for thread in io_thread: #Add this "for loop" for better understanding
thread.start()
for thread in io_thread:
thread.join()
|
02454937b500afe1dc9b7387e63f9b3327be6a16
|
contrail_provisioning/config/templates/contrail_discovery_conf.py
|
contrail_provisioning/config/templates/contrail_discovery_conf.py
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed]
######################################################################
""")
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
# Use consistent hashing for Collector for better handling of HA events
[Collector]
policy = chash
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed | chash]
######################################################################
""")
|
Enable consistent-hashing policy for Collector
|
Enable consistent-hashing policy for Collector
Change-Id: I7ed6747b6c3ef95d8fed0c62e786c7039fb510a6
Fixes-Bug: #1600368
|
Python
|
apache-2.0
|
Juniper/contrail-provisioning,Juniper/contrail-provisioning
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed]
######################################################################
""")
Enable consistent-hashing policy for Collector
Change-Id: I7ed6747b6c3ef95d8fed0c62e786c7039fb510a6
Fixes-Bug: #1600368
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
# Use consistent hashing for Collector for better handling of HA events
[Collector]
policy = chash
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed | chash]
######################################################################
""")
|
<commit_before>import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed]
######################################################################
""")
<commit_msg>Enable consistent-hashing policy for Collector
Change-Id: I7ed6747b6c3ef95d8fed0c62e786c7039fb510a6
Fixes-Bug: #1600368<commit_after>
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
# Use consistent hashing for Collector for better handling of HA events
[Collector]
policy = chash
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed | chash]
######################################################################
""")
|
import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed]
######################################################################
""")
Enable consistent-hashing policy for Collector
Change-Id: I7ed6747b6c3ef95d8fed0c62e786c7039fb510a6
Fixes-Bug: #1600368import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
# Use consistent hashing for Collector for better handling of HA events
[Collector]
policy = chash
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed | chash]
######################################################################
""")
|
<commit_before>import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed]
######################################################################
""")
<commit_msg>Enable consistent-hashing policy for Collector
Change-Id: I7ed6747b6c3ef95d8fed0c62e786c7039fb510a6
Fixes-Bug: #1600368<commit_after>import string
template = string.Template("""
[DEFAULTS]
zk_server_ip=$__contrail_zk_server_ip__
zk_server_port=$__contrail_zk_server_port__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
log_local=$__contrail_log_local__
log_file=$__contrail_log_file__
cassandra_server_list=$__contrail_cassandra_server_list__
log_level=SYS_NOTICE
# minimim time to allow client to cache service information (seconds)
ttl_min=300
# maximum time to allow client to cache service information (seconds)
ttl_max=1800
# health check ping interval <=0 for disabling
hc_interval=$__contrail_healthcheck_interval__
# maximum hearbeats to miss before server will declare publisher out of
# service.
hc_max_miss=3
# use short TTL for agressive rescheduling if all services are not up
ttl_short=1
# for DNS service, we use fixed policy
# even when the cluster has more than two control nodes, only two of these
# should provide the DNS service
[DNS-SERVER]
policy = fixed
# Use consistent hashing for Collector for better handling of HA events
[Collector]
policy = chash
######################################################################
# Other service specific knobs ...
# use short TTL for agressive rescheduling if all services are not up
# ttl_short=1
# specify policy to use when assigning services
# policy = [load-balance | round-robin | fixed | chash]
######################################################################
""")
|
93dd1ad89d030e626c5692954c38526c9b851fd8
|
salt/matchers/list_match.py
|
salt/matchers/list_match.py
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning(
'List matcher unexpectedly did not return, for target %s, '
'this is probably a bug.', tgt
)
return False
|
Add target to the warning log message
|
Add target to the warning log message
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
Add target to the warning log message
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning(
'List matcher unexpectedly did not return, for target %s, '
'this is probably a bug.', tgt
)
return False
|
<commit_before># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
<commit_msg>Add target to the warning log message<commit_after>
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning(
'List matcher unexpectedly did not return, for target %s, '
'this is probably a bug.', tgt
)
return False
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
Add target to the warning log message# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning(
'List matcher unexpectedly did not return, for target %s, '
'this is probably a bug.', tgt
)
return False
|
<commit_before># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
<commit_msg>Add target to the warning log message<commit_after># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning(
'List matcher unexpectedly did not return, for target %s, '
'this is probably a bug.', tgt
)
return False
|
32c40710a562b194385f2340bf882cb3709b74e3
|
masquerade/urls.py
|
masquerade/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^mask/$', 'masquerade.views.mask'),
url(r'^unmask/$', 'masquerade.views.unmask'),
)
|
from django.conf.urls import patterns, url
from masquerade.views import mask
from masquerade.views import unmask
urlpatterns = [
url(r'^mask/$', mask),
url(r'^unmask/$', unmask),
]
|
Fix Django 1.10 deprecation warning
|
Fix Django 1.10 deprecation warning
|
Python
|
apache-2.0
|
erikcw/django-masquerade,erikcw/django-masquerade,erikcw/django-masquerade
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^mask/$', 'masquerade.views.mask'),
url(r'^unmask/$', 'masquerade.views.unmask'),
)
Fix Django 1.10 deprecation warning
|
from django.conf.urls import patterns, url
from masquerade.views import mask
from masquerade.views import unmask
urlpatterns = [
url(r'^mask/$', mask),
url(r'^unmask/$', unmask),
]
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^mask/$', 'masquerade.views.mask'),
url(r'^unmask/$', 'masquerade.views.unmask'),
)
<commit_msg>Fix Django 1.10 deprecation warning<commit_after>
|
from django.conf.urls import patterns, url
from masquerade.views import mask
from masquerade.views import unmask
urlpatterns = [
url(r'^mask/$', mask),
url(r'^unmask/$', unmask),
]
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^mask/$', 'masquerade.views.mask'),
url(r'^unmask/$', 'masquerade.views.unmask'),
)
Fix Django 1.10 deprecation warningfrom django.conf.urls import patterns, url
from masquerade.views import mask
from masquerade.views import unmask
urlpatterns = [
url(r'^mask/$', mask),
url(r'^unmask/$', unmask),
]
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^mask/$', 'masquerade.views.mask'),
url(r'^unmask/$', 'masquerade.views.unmask'),
)
<commit_msg>Fix Django 1.10 deprecation warning<commit_after>from django.conf.urls import patterns, url
from masquerade.views import mask
from masquerade.views import unmask
urlpatterns = [
url(r'^mask/$', mask),
url(r'^unmask/$', unmask),
]
|
69304be6df25ba2db53985b3d1e2e66954b7d655
|
genes/lib/traits.py
|
genes/lib/traits.py
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps(func)
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps(func)
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
Add argument to @wraps decorator
|
Add argument to @wraps decorator
|
Python
|
mit
|
hatchery/Genepool2,hatchery/genepool
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
Add argument to @wraps decorator
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps(func)
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps(func)
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
<commit_before>from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
<commit_msg>Add argument to @wraps decorator<commit_after>
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps(func)
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps(func)
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
Add argument to @wraps decoratorfrom functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps(func)
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps(func)
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
<commit_before>from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
<commit_msg>Add argument to @wraps decorator<commit_after>from functools import wraps
def if_any(*conds):
def wrapper(func):
@wraps(func)
def run_if_any(*args, **kwargs):
if any(conds):
return func(*args, **kwargs)
return run_if_any
return wrapper
def if_all(*conds):
def wrapper(func):
@wraps(func)
def run_if_all(*args, **kwargs):
if all(conds):
return func(*args, **kwargs)
return run_if_all
return wrapper
|
76244d9a9750d1e095884b3a453caffa6d1ef3c4
|
main/views/views.py
|
main/views/views.py
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
def index(request):
users = UserProfile.objects.all()
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': Task.objects.filter(status=True).order_by('-timestamp_create')[:10],
'services': Service.objects.filter(published=True).order_by('-created_at')[:10],
})
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
from voting.models import Vote
def index(request):
users = UserProfile.objects.all()
tasks_last = Task.objects.filter(status=True).order_by('-timestamp_create')[:10]
votes = Vote.objects.get_scores_in_bulk(tasks_last)
tasks_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, tasks_last))
services_last = Service.objects.filter(published=True).order_by('-created_at')[:10]
votes = Vote.objects.get_scores_in_bulk(services_last)
services_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, services_last))
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': tasks_last,
'services': services_last,
})
|
Hide last task and services with negative score from main page
|
Hide last task and services with negative score from main page
|
Python
|
agpl-3.0
|
Davidyuk/witcoin,Davidyuk/witcoin
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
def index(request):
users = UserProfile.objects.all()
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': Task.objects.filter(status=True).order_by('-timestamp_create')[:10],
'services': Service.objects.filter(published=True).order_by('-created_at')[:10],
})
Hide last task and services with negative score from main page
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
from voting.models import Vote
def index(request):
users = UserProfile.objects.all()
tasks_last = Task.objects.filter(status=True).order_by('-timestamp_create')[:10]
votes = Vote.objects.get_scores_in_bulk(tasks_last)
tasks_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, tasks_last))
services_last = Service.objects.filter(published=True).order_by('-created_at')[:10]
votes = Vote.objects.get_scores_in_bulk(services_last)
services_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, services_last))
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': tasks_last,
'services': services_last,
})
|
<commit_before>from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
def index(request):
users = UserProfile.objects.all()
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': Task.objects.filter(status=True).order_by('-timestamp_create')[:10],
'services': Service.objects.filter(published=True).order_by('-created_at')[:10],
})
<commit_msg>Hide last task and services with negative score from main page<commit_after>
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
from voting.models import Vote
def index(request):
users = UserProfile.objects.all()
tasks_last = Task.objects.filter(status=True).order_by('-timestamp_create')[:10]
votes = Vote.objects.get_scores_in_bulk(tasks_last)
tasks_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, tasks_last))
services_last = Service.objects.filter(published=True).order_by('-created_at')[:10]
votes = Vote.objects.get_scores_in_bulk(services_last)
services_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, services_last))
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': tasks_last,
'services': services_last,
})
|
from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
def index(request):
users = UserProfile.objects.all()
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': Task.objects.filter(status=True).order_by('-timestamp_create')[:10],
'services': Service.objects.filter(published=True).order_by('-created_at')[:10],
})
Hide last task and services with negative score from main pagefrom django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
from voting.models import Vote
def index(request):
users = UserProfile.objects.all()
tasks_last = Task.objects.filter(status=True).order_by('-timestamp_create')[:10]
votes = Vote.objects.get_scores_in_bulk(tasks_last)
tasks_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, tasks_last))
services_last = Service.objects.filter(published=True).order_by('-created_at')[:10]
votes = Vote.objects.get_scores_in_bulk(services_last)
services_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, services_last))
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': tasks_last,
'services': services_last,
})
|
<commit_before>from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
def index(request):
users = UserProfile.objects.all()
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': Task.objects.filter(status=True).order_by('-timestamp_create')[:10],
'services': Service.objects.filter(published=True).order_by('-created_at')[:10],
})
<commit_msg>Hide last task and services with negative score from main page<commit_after>from django.shortcuts import render
from main.models import UserProfile, Transaction, Task, Service
from django.db.models import Avg
from voting.models import Vote
def index(request):
users = UserProfile.objects.all()
tasks_last = Task.objects.filter(status=True).order_by('-timestamp_create')[:10]
votes = Vote.objects.get_scores_in_bulk(tasks_last)
tasks_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, tasks_last))
services_last = Service.objects.filter(published=True).order_by('-created_at')[:10]
votes = Vote.objects.get_scores_in_bulk(services_last)
services_last = list(filter(lambda x: x.id not in votes or votes[x.id]['score'] > 0, services_last))
return render(request, 'main/index.html', {
'users_top_spend': sorted(users, key=lambda a: a.spend(), reverse=True)[:4],
'users_top_balance': sorted(users, key=lambda a: a.balance(), reverse=True)[:4],
'users_last': UserProfile.objects.order_by('-user__date_joined')[:4],
'users_count': UserProfile.objects.count(),
'money_all': -UserProfile.objects.get(pk=1).balance(),
'money_avg': Transaction.objects.filter(status=True).exclude(user_from=1).aggregate(Avg('amount'))['amount__avg'] or 0,
'tasks': tasks_last,
'services': services_last,
})
|
cb3312419f20b10d92cff4ec06606a2b7ee91950
|
metaopt/__init__.py
|
metaopt/__init__.py
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
#__maintainer__ = "first last"
#__maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek, Justin Heinermann'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
# __maintainer__ = "first last"
# __maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
Add Justin to authors, fix comments
|
Add Justin to authors, fix comments
|
Python
|
bsd-3-clause
|
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
#__maintainer__ = "first last"
#__maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
Add Justin to authors, fix comments
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek, Justin Heinermann'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
# __maintainer__ = "first last"
# __maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
<commit_before># -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
#__maintainer__ = "first last"
#__maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
<commit_msg>Add Justin to authors, fix comments<commit_after>
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek, Justin Heinermann'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
# __maintainer__ = "first last"
# __maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
#__maintainer__ = "first last"
#__maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
Add Justin to authors, fix comments# -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek, Justin Heinermann'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
# __maintainer__ = "first last"
# __maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
<commit_before># -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
#__maintainer__ = "first last"
#__maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
<commit_msg>Add Justin to authors, fix comments<commit_after># -*- coding:utf-8 -*-
"""
Root package of MetaOpt.
"""
__author__ = 'Renke Grunwald, Bengt Lüers, Jendrik Poloczek, Justin Heinermann'
__author_email__ = 'info@metaopt.org'
__license__ = '3-Clause BSD'
# __maintainer__ = "first last"
# __maintainer_email__ = "first.last@example.com"
__url__ = 'http://organic-es.tumblr.com/'
__version__ = '0.0.1'
|
a09cf17583ca558d3e4c77a1682ed01c223f182d
|
ceph_deploy/util/arg_validators.py
|
ceph_deploy/util/arg_validators.py
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.gethostbyname(host)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.getaddrinfo(host, 0)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
Support hostname that resolve to IPv6-only address
|
Support hostname that resolve to IPv6-only address
The current hostname validation does not cope with IPv6-only hostnames. Use getaddrinfo instead of gethostbyname to fix this. getaddrinfo raises the same exceptions and should work like a drop-in-replacement in this scenario.
We should also address the IPv4-only check for if then input is an IP-address but the use of split on ':' is problematic and prevents code to check for IPv6-addresses as input. I'm not sure what the thought behind allowing "name:host" is so leaving it untouched.
I have proposed code to check and warn for both IPv4 and IPv6 address input that I can provide as soon as I understand the scope of parsing "name:host" input.
|
Python
|
mit
|
rtulke/ceph-deploy,ceph/ceph-deploy,codenrhoden/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy,isyippee/ceph-deploy,alfredodeza/ceph-deploy,ktdreyer/ceph-deploy,branto1/ceph-deploy,SUSE/ceph-deploy,zhouyuan/ceph-deploy,Vicente-Cheng/ceph-deploy,trhoden/ceph-deploy,ceph/ceph-deploy,branto1/ceph-deploy,zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,jumpstarter-io/ceph-deploy,Vicente-Cheng/ceph-deploy,shenhequnying/ceph-deploy,SUSE/ceph-deploy,ddiss/ceph-deploy,rtulke/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,osynge/ceph-deploy,osynge/ceph-deploy,ktdreyer/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,trhoden/ceph-deploy,ddiss/ceph-deploy,isyippee/ceph-deploy,alfredodeza/ceph-deploy,imzhulei/ceph-deploy,jumpstarter-io/ceph-deploy,ghxandsky/ceph-deploy,ghxandsky/ceph-deploy
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.gethostbyname(host)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
Support hostname that resolve to IPv6-only address
The current hostname validation does not cope with IPv6-only hostnames. Use getaddrinfo instead of gethostbyname to fix this. getaddrinfo raises the same exceptions and should work like a drop-in-replacement in this scenario.
We should also address the IPv4-only check for if then input is an IP-address but the use of split on ':' is problematic and prevents code to check for IPv6-addresses as input. I'm not sure what the thought behind allowing "name:host" is so leaving it untouched.
I have proposed code to check and warn for both IPv4 and IPv6 address input that I can provide as soon as I understand the scope of parsing "name:host" input.
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.getaddrinfo(host, 0)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
<commit_before>import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.gethostbyname(host)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
<commit_msg>Support hostname that resolve to IPv6-only address
The current hostname validation does not cope with IPv6-only hostnames. Use getaddrinfo instead of gethostbyname to fix this. getaddrinfo raises the same exceptions and should work like a drop-in-replacement in this scenario.
We should also address the IPv4-only check for if then input is an IP-address but the use of split on ':' is problematic and prevents code to check for IPv6-addresses as input. I'm not sure what the thought behind allowing "name:host" is so leaving it untouched.
I have proposed code to check and warn for both IPv4 and IPv6 address input that I can provide as soon as I understand the scope of parsing "name:host" input.<commit_after>
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.getaddrinfo(host, 0)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.gethostbyname(host)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
Support hostname that resolve to IPv6-only address
The current hostname validation does not cope with IPv6-only hostnames. Use getaddrinfo instead of gethostbyname to fix this. getaddrinfo raises the same exceptions and should work like a drop-in-replacement in this scenario.
We should also address the IPv4-only check for if then input is an IP-address but the use of split on ':' is problematic and prevents code to check for IPv6-addresses as input. I'm not sure what the thought behind allowing "name:host" is so leaving it untouched.
I have proposed code to check and warn for both IPv4 and IPv6 address input that I can provide as soon as I understand the scope of parsing "name:host" input.import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.getaddrinfo(host, 0)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
<commit_before>import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.gethostbyname(host)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
<commit_msg>Support hostname that resolve to IPv6-only address
The current hostname validation does not cope with IPv6-only hostnames. Use getaddrinfo instead of gethostbyname to fix this. getaddrinfo raises the same exceptions and should work like a drop-in-replacement in this scenario.
We should also address the IPv4-only check for if then input is an IP-address but the use of split on ':' is problematic and prevents code to check for IPv6-addresses as input. I'm not sure what the thought behind allowing "name:host" is so leaving it untouched.
I have proposed code to check and warn for both IPv4 and IPv6 address input that I can provide as soon as I understand the scope of parsing "name:host" input.<commit_after>import socket
import argparse
import re
class RegexMatch(object):
"""
Performs regular expression match on value.
If the regular expression pattern matches it will it will return an error
message that will work with argparse.
"""
def __init__(self, pattern, statement=None):
self.string_pattern = pattern
self.pattern = re.compile(pattern)
self.statement = statement
if not self.statement:
self.statement = "must match pattern %s" % self.string_pattern
def __call__(self, string):
match = self.pattern.search(string)
if match:
raise argparse.ArgumentError(None, self.statement)
return string
class Hostname(object):
"""
Checks wether a given hostname is resolvable in DNS, otherwise raising and
argparse error.
"""
def __init__(self, _socket=None):
self.socket = _socket or socket # just used for testing
def __call__(self, string):
parts = string.split(':')
name = parts[0]
host = parts[-1]
try:
self.socket.getaddrinfo(host, 0)
except self.socket.gaierror:
msg = "hostname: %s is not resolvable" % host
raise argparse.ArgumentError(None, msg)
try:
self.socket.inet_aton(name)
except self.socket.error:
return string # not an IP
else:
msg = '%s must be a hostname not an IP' % name
raise argparse.ArgumentError(None, msg)
return string
|
c082edf34a51fd0e587a8fbc7bcf4cf18838462d
|
modules/libmagic.py
|
modules/libmagic.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode(encoding='UTF-8', errors='replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode('UTF-8', 'replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
Fix for python 2.6 support
|
Fix for python 2.6 support
|
Python
|
mpl-2.0
|
jmlong1027/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,MITRECND/multiscanner,MITRECND/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode(encoding='UTF-8', errors='replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
Fix for python 2.6 support
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode('UTF-8', 'replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode(encoding='UTF-8', errors='replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
<commit_msg>Fix for python 2.6 support<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode('UTF-8', 'replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode(encoding='UTF-8', errors='replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
Fix for python 2.6 support# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode('UTF-8', 'replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode(encoding='UTF-8', errors='replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
<commit_msg>Fix for python 2.6 support<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import magic
except:
print("python-magic module not installed...")
magic = False
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "libmagic"
DEFAULTCONF = {
'magicfile':None,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if magic:
return True
else:
return False
def scan(filelist, conf=DEFAULTCONF):
if conf['magicfile']:
try:
maaagic = magic.Magic(magic_file=conf['magicfile'])
except:
print("ERROR: Failed to use magic file", conf['magicfile'])
maaagic = magic.Magic()
else:
maaagic = magic.Magic()
results = []
for fname in filelist:
results.append((fname, maaagic.from_file(fname).decode('UTF-8', 'replace')))
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (results, metadata)
|
5afd5ee8a7ff1b0a6720b57605140ec279da123f
|
delivercute/production_settings.py
|
delivercute/production_settings.py
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
# DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
Debug off for production settings.
|
Debug off for production settings.
|
Python
|
mit
|
WillWeatherford/deliver-cute,WillWeatherford/deliver-cute
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
# DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
Debug off for production settings.
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
<commit_before>"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
# DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
<commit_msg>Debug off for production settings.<commit_after>
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
# DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
Debug off for production settings."""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
<commit_before>"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
# DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
<commit_msg>Debug off for production settings.<commit_after>"""Overwrite and add settings specifically for production deployed instance."""
from delivercute.settings import *
DEBUG = False
ALLOWED_HOSTS.append('.us-west-2.compute.amazonaws.com')
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = ()
|
d9cf7b736416f942a7bb9c164d99fdb3b4de1b08
|
leapday/templatetags/leapday_extras.py
|
leapday/templatetags/leapday_extras.py
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
ret = ret[1:] + [ret[0]]
return ret
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
return ret
|
Fix reordering due to removal of Other
|
Fix reordering due to removal of Other
|
Python
|
mit
|
Zerack/zoll.me,Zerack/zoll.me
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
ret = ret[1:] + [ret[0]]
return ret
Fix reordering due to removal of Other
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
return ret
|
<commit_before>'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
ret = ret[1:] + [ret[0]]
return ret
<commit_msg>Fix reordering due to removal of Other<commit_after>
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
return ret
|
'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
ret = ret[1:] + [ret[0]]
return ret
Fix reordering due to removal of Other'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
return ret
|
<commit_before>'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
ret = ret[1:] + [ret[0]]
return ret
<commit_msg>Fix reordering due to removal of Other<commit_after>'''
James D. Zoll
4/15/2013
Purpose: Defines template tags for the Leapday Recipedia application.
License: This is a public work.
'''
from django import template
register = template.Library()
@register.filter()
def good_css_name(value):
'''
Returns the lower-case hyphen-replaced display name,
which used as the css class for the good.
Keyword Arguments:
value -> Good. The good to get the css class for.
'''
return value.display_name.lower().replace(' ','-')
@register.filter()
def base_goods_ordered_set(value):
'''
Returns a set of the base goods required for an object's creation,
ordered by the desired order. In this case, that order is value low
to high, with the "Other" good on the end instead of the front.
Additionally, this attempts first to load goods from the
shim_base_ingredients attribute, which is present in some situations
where the attribute has been preloaded to prevent excessive DB IO.
Keyword Arguments:
value -> Good. The good for which to return the list of ingredients.
'''
try:
base_goods = value.shim_base_ingredients
except:
base_goods = value.base_ingredients.all()
ret = sorted(base_goods, key=lambda x: x.ingredient.value)
return ret
|
bc50210afc3cfb43441fe431e34e04db612f87c7
|
importkit/yaml/schema.py
|
importkit/yaml/schema.py
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, meta):
if 'marshalled' in meta and meta['marshalled']:
return
cls.validatefile(meta['filename'])
@classmethod
def validatefile(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
Implement YAML file compilation into 'bytecode'
|
import: Implement YAML file compilation into 'bytecode'
Store serialized Python structures resulted from loading YAML source in
the .ymlc files, a-la .pyc
|
Python
|
mit
|
sprymix/importkit
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
import: Implement YAML file compilation into 'bytecode'
Store serialized Python structures resulted from loading YAML source in
the .ymlc files, a-la .pyc
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, meta):
if 'marshalled' in meta and meta['marshalled']:
return
cls.validatefile(meta['filename'])
@classmethod
def validatefile(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
<commit_before>import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
<commit_msg>import: Implement YAML file compilation into 'bytecode'
Store serialized Python structures resulted from loading YAML source in
the .ymlc files, a-la .pyc<commit_after>
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, meta):
if 'marshalled' in meta and meta['marshalled']:
return
cls.validatefile(meta['filename'])
@classmethod
def validatefile(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
import: Implement YAML file compilation into 'bytecode'
Store serialized Python structures resulted from loading YAML source in
the .ymlc files, a-la .pycimport subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, meta):
if 'marshalled' in meta and meta['marshalled']:
return
cls.validatefile(meta['filename'])
@classmethod
def validatefile(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
<commit_before>import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
<commit_msg>import: Implement YAML file compilation into 'bytecode'
Store serialized Python structures resulted from loading YAML source in
the .ymlc files, a-la .pyc<commit_after>import subprocess
class YamlValidationError(Exception): pass
class Base(object):
schema_file = ''
@classmethod
def validate(cls, meta):
if 'marshalled' in meta and meta['marshalled']:
return
cls.validatefile(meta['filename'])
@classmethod
def validatefile(cls, filename):
kwalify = subprocess.Popen(['kwalify', '-lf', cls.schema_file, filename],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = kwalify.communicate()
if stdout.find('INVALID') >= 0 or stderr.find('ERROR') >= 0:
raise YamlValidationError('Failed to validate file: %s\n\nValidator output: \n%s' %
(filename, stderr + stdout))
@classmethod
def _create_class(cls, meta, dct):
cls.schema_file = meta['filename']
return type(meta['class']['name'], (Base,), {'schema_file': meta['filename']})
|
a12076e0fd8dfd0e4d35802684bbd837ed2246b0
|
erpnext/hub_node/data_migration_mapping/item_to_hub_item/__init__.py
|
erpnext/hub_node/data_migration_mapping/item_to_hub_item/__init__.py
|
import io, base64, urllib, os
def pre_process(doc):
file_path = doc.image
file_name = os.path.basename(file_path)
if file_path.startswith('http'):
url = file_path
file_path = os.path.join('/tmp', file_name)
urllib.urlretrieve(url, file_path)
with io.open(file_path, 'rb') as f:
doc.image = base64.b64encode(f.read())
doc.image_file_name = file_name
return doc
|
Convert image to base64 before sending Item to sync
|
Convert image to base64 before sending Item to sync
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
Convert image to base64 before sending Item to sync
|
import io, base64, urllib, os
def pre_process(doc):
file_path = doc.image
file_name = os.path.basename(file_path)
if file_path.startswith('http'):
url = file_path
file_path = os.path.join('/tmp', file_name)
urllib.urlretrieve(url, file_path)
with io.open(file_path, 'rb') as f:
doc.image = base64.b64encode(f.read())
doc.image_file_name = file_name
return doc
|
<commit_before><commit_msg>Convert image to base64 before sending Item to sync<commit_after>
|
import io, base64, urllib, os
def pre_process(doc):
file_path = doc.image
file_name = os.path.basename(file_path)
if file_path.startswith('http'):
url = file_path
file_path = os.path.join('/tmp', file_name)
urllib.urlretrieve(url, file_path)
with io.open(file_path, 'rb') as f:
doc.image = base64.b64encode(f.read())
doc.image_file_name = file_name
return doc
|
Convert image to base64 before sending Item to syncimport io, base64, urllib, os
def pre_process(doc):
file_path = doc.image
file_name = os.path.basename(file_path)
if file_path.startswith('http'):
url = file_path
file_path = os.path.join('/tmp', file_name)
urllib.urlretrieve(url, file_path)
with io.open(file_path, 'rb') as f:
doc.image = base64.b64encode(f.read())
doc.image_file_name = file_name
return doc
|
<commit_before><commit_msg>Convert image to base64 before sending Item to sync<commit_after>import io, base64, urllib, os
def pre_process(doc):
file_path = doc.image
file_name = os.path.basename(file_path)
if file_path.startswith('http'):
url = file_path
file_path = os.path.join('/tmp', file_name)
urllib.urlretrieve(url, file_path)
with io.open(file_path, 'rb') as f:
doc.image = base64.b64encode(f.read())
doc.image_file_name = file_name
return doc
|
|
e0109cdb52f02f1e8963849adeb42311cef2aa6c
|
gratipay/renderers/jinja2_htmlescaped.py
|
gratipay/renderers/jinja2_htmlescaped.py
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = context['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
Make htmlescape function available in templates
|
Make htmlescape function available in templates
For when we explicitly call it.
|
Python
|
mit
|
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
Make htmlescape function available in templates
For when we explicitly call it.
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = context['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
<commit_before>import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
<commit_msg>Make htmlescape function available in templates
For when we explicitly call it.<commit_after>
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = context['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
Make htmlescape function available in templates
For when we explicitly call it.import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = context['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
<commit_before>import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
<commit_msg>Make htmlescape function available in templates
For when we explicitly call it.<commit_after>import aspen_jinja2_renderer as base
from markupsafe import escape as htmlescape
class HTMLRenderer(base.Renderer):
def render_content(self, context):
# Extend to inject an HTML-escaping function. Since autoescape is on,
# template authors shouldn't normally need to use this function, but
# having it in the simplate context makes it easier to implement i18n.
context['state']['escape'] = context['escape'] = htmlescape
return base.Renderer.render_content(self, context)
class Factory(base.Factory):
Renderer = HTMLRenderer
def compile_meta(self, configuration):
# Override to turn on autoescaping.
loader = base.FileSystemLoader(configuration.project_root)
return base.Environment( loader=loader
, autoescape=True
, extensions=['jinja2.ext.autoescape']
)
|
1da4245cbc25a609b006254714ae273b6a6824e0
|
retools/__init__.py
|
retools/__init__.py
|
#
|
"""retools
This module holds a default Redis instance, which can be
configured process-wide::
from retools import Connection
Connection.set_default(host='127.0.0.1', db=0, **kwargs)
"""
from redis import Redis
from retools.redconn import Connection
__all__ = ['Connection']
class Connection(object):
    """Process-wide holder for a default Redis connection."""

    # Shared default connection; stays ``None`` until ``set_default`` runs.
    redis = None

    @classmethod
    def set_default(cls, host='localhost', port=6379, db=0, password=None):
        """Create and store the process-wide default ``Redis`` instance."""
        connection = Redis(host=host, port=port, db=db, password=password)
        Connection.redis = connection

    @classmethod
    def get_default(cls):
        """Return the current default connection (``None`` if unset)."""
        return Connection.redis
|
Update global lock, default connection
|
Update global lock, default connection
|
Python
|
mit
|
mozilla-services/retools,bbangert/retools,0x1997/retools
|
#
Update global lock, default connection
|
"""retools
This module holds a default Redis instance, which can be
configured process-wide::
from retools import Connection
Connection.set_default(host='127.0.0.1', db=0, **kwargs)
"""
from redis import Redis
from retools.redconn import Connection
__all__ = ['Connection']
class Connection(object):
redis = None
@classmethod
def set_default(cls, host='localhost', port=6379, db=0, password=None):
Connection.redis = Redis(host=host, port=port, db=db,
password=password)
@classmethod
def get_default(cls):
return Connection.redis
|
<commit_before>#
<commit_msg>Update global lock, default connection<commit_after>
|
"""retools
This module holds a default Redis instance, which can be
configured process-wide::
from retools import Connection
Connection.set_default(host='127.0.0.1', db=0, **kwargs)
"""
from redis import Redis
from retools.redconn import Connection
__all__ = ['Connection']
class Connection(object):
redis = None
@classmethod
def set_default(cls, host='localhost', port=6379, db=0, password=None):
Connection.redis = Redis(host=host, port=port, db=db,
password=password)
@classmethod
def get_default(cls):
return Connection.redis
|
#
Update global lock, default connection"""retools
This module holds a default Redis instance, which can be
configured process-wide::
from retools import Connection
Connection.set_default(host='127.0.0.1', db=0, **kwargs)
"""
from redis import Redis
from retools.redconn import Connection
__all__ = ['Connection']
class Connection(object):
redis = None
@classmethod
def set_default(cls, host='localhost', port=6379, db=0, password=None):
Connection.redis = Redis(host=host, port=port, db=db,
password=password)
@classmethod
def get_default(cls):
return Connection.redis
|
<commit_before>#
<commit_msg>Update global lock, default connection<commit_after>"""retools
This module holds a default Redis instance, which can be
configured process-wide::
from retools import Connection
Connection.set_default(host='127.0.0.1', db=0, **kwargs)
"""
from redis import Redis
from retools.redconn import Connection
__all__ = ['Connection']
class Connection(object):
redis = None
@classmethod
def set_default(cls, host='localhost', port=6379, db=0, password=None):
Connection.redis = Redis(host=host, port=port, db=db,
password=password)
@classmethod
def get_default(cls):
return Connection.redis
|
7d9690b974263ba499d026eabee504b5bd6cb8ac
|
InvenTree/plugin/views.py
|
InvenTree/plugin/views.py
|
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
import logging
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
logger = logging.getLogger('inventree')
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
logger.error(f"Plugin '{plug.slug}' could not render custom panels at '{self.request.path}'")
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
Add logging message when plugin fails to render custom panels
|
Add logging message when plugin fails to render custom panels
|
Python
|
mit
|
inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree
|
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
class InvenTreePluginViewMixin:
    """
    Custom view mixin which adds context data to the view,
    based on loaded plugins.

    This allows rendered pages to be augmented by loaded plugins.
    """

    def get_plugin_panels(self, ctx):
        """
        Return a list of extra 'plugin panels' associated with this view
        """

        panels = []

        for plugin in registry.with_mixin('panel'):
            try:
                panels.extend(plugin.render_panels(self, self.request, ctx))
            except Exception:
                # A faulty plugin must not break the page render
                kind, info, data = sys.exc_info()

                trace = '\n'.join(traceback.format_exception(kind, info, data))
                reporter = ExceptionReporter(self.request, kind, info, data)

                # Record the failure in the error database
                Error.objects.create(
                    kind=kind.__name__,
                    info=info,
                    data=trace,
                    path=self.request.path,
                    html=reporter.get_traceback_html(),
                )

        return panels

    def get_context_data(self, **kwargs):
        """
        Add plugin context data to the view
        """

        ctx = super().get_context_data(**kwargs)

        if settings.PLUGINS_ENABLED:
            ctx['plugin_panels'] = self.get_plugin_panels(ctx)

        return ctx
Add logging message when plugin fails to render custom panels
|
import logging
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
logger = logging.getLogger('inventree')
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
logger.error(f"Plugin '{plug.slug}' could not render custom panels at '{self.request.path}'")
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
<commit_before>import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
<commit_msg>Add logging message when plugin fails to render custom panels<commit_after>
|
import logging
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
logger = logging.getLogger('inventree')
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
logger.error(f"Plugin '{plug.slug}' could not render custom panels at '{self.request.path}'")
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
Add logging message when plugin fails to render custom panelsimport logging
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
logger = logging.getLogger('inventree')
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
logger.error(f"Plugin '{plug.slug}' could not render custom panels at '{self.request.path}'")
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
<commit_before>import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
<commit_msg>Add logging message when plugin fails to render custom panels<commit_after>import logging
import sys
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
from error_report.models import Error
from plugin.registry import registry
logger = logging.getLogger('inventree')
class InvenTreePluginViewMixin:
"""
Custom view mixin which adds context data to the view,
based on loaded plugins.
This allows rendered pages to be augmented by loaded plugins.
"""
def get_plugin_panels(self, ctx):
"""
Return a list of extra 'plugin panels' associated with this view
"""
panels = []
for plug in registry.with_mixin('panel'):
try:
panels += plug.render_panels(self, self.request, ctx)
except Exception:
# Prevent any plugin error from crashing the page render
kind, info, data = sys.exc_info()
# Log the error to the database
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=self.request.path,
html=ExceptionReporter(self.request, kind, info, data).get_traceback_html(),
)
logger.error(f"Plugin '{plug.slug}' could not render custom panels at '{self.request.path}'")
return panels
def get_context_data(self, **kwargs):
"""
Add plugin context data to the view
"""
ctx = super().get_context_data(**kwargs)
if settings.PLUGINS_ENABLED:
ctx['plugin_panels'] = self.get_plugin_panels(ctx)
return ctx
|
7c255cf7b8ac7d79b63c2c91cf2ae3a233cc14f8
|
genomic_neuralnet/analyses/optimize_ols_prediction.py
|
genomic_neuralnet/analyses/optimize_ols_prediction.py
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_en.shelf', 'OLS')
if __name__ == '__main__':
main()
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
|
Write OLS results to correct shelf
|
Write OLS results to correct shelf
|
Python
|
mit
|
rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
    """Optimize the OLS (linear regression) predictor and persist results.

    OLS has no hyperparameters, so the parameter grid is empty; the run
    simply evaluates the predictor and stores the outcome.
    """
    params = {}
    # Bug fix: write to the OLS-specific shelf. The previous value,
    # 'optimal_en.shelf', is the elastic-net run's shelf and would be
    # clobbered by this optimization.
    run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
Write OLS results to correct shelf
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_en.shelf', 'OLS')
if __name__ == '__main__':
main()
<commit_msg>Write OLS results to correct shelf<commit_after>
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
|
from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_en.shelf', 'OLS')
if __name__ == '__main__':
main()
Write OLS results to correct shelffrom __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_en.shelf', 'OLS')
if __name__ == '__main__':
main()
<commit_msg>Write OLS results to correct shelf<commit_after>from __future__ import print_function
from genomic_neuralnet.methods import get_lr_prediction
from genomic_neuralnet.analyses import run_optimization
def main():
params = {}
run_optimization(get_lr_prediction, params, 'optimal_lr.shelf', 'OLS')
if __name__ == '__main__':
main()
|
0b89da2ea93051ffdd47498bc047cc07885c2957
|
opps/boxes/models.py
|
opps/boxes/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
    """Publishable definition of a dynamic queryset over an opps model."""

    # Human-readable name for this dynamic queryset.
    name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
    # Unique, indexed identifier used to reference the queryset.
    slug = models.SlugField(
        _(u"Slug"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    # Target model, chosen from the installed opps apps (see OPPS_APPS).
    model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
    # Maximum number of objects the queryset should yield.
    limit = models.PositiveIntegerField(_(u'Limit'), default=7)
    # Sort direction applied when evaluating the queryset.
    order = models.CharField(_('Order'), max_length=1, choices=(
        ('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
    """Box whose contents are produced by a stored ``QuerySet`` definition."""

    # The dynamic queryset that supplies this box's content.
    dynamicqueryset = models.ForeignKey(
        'boxes.QuerySet',
        verbose_name=_(u'Query Set')
    )
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
channel = models.ForeignKey(
'channels.Channel',
verbose_name=_(u"Channel"),
)
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
Add field channel on QuerySet boxes
|
Add field channel on QuerySet boxes
|
Python
|
mit
|
YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
Add field channel on QuerySet boxes
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
channel = models.ForeignKey(
'channels.Channel',
verbose_name=_(u"Channel"),
)
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
<commit_msg>Add field channel on QuerySet boxes<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
    """Stores a user-defined dynamic query over an opps model.

    NOTE(review): choices for ``model`` come from OPPS_APPS, which is
    built at import time from the installed opps models (empty on
    ImportError).
    """
    # Human-readable name of the dynamic queryset.
    name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
    # Unique, indexed slug identifying the queryset.
    slug = models.SlugField(
        _(u"Slug"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    # Dotted "app_label.ObjectName" of the model this queryset selects from.
    model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
    # Maximum number of objects the queryset yields.
    limit = models.PositiveIntegerField(_(u'Limit'), default=7)
    # Sort direction: '-' for descending, '+' for ascending.
    order = models.CharField(_('Order'), max_length=1, choices=(
        ('-', 'DESC'), ('+', 'ASC')))
    # Channel this queryset is bound to.
    channel = models.ForeignKey(
        'channels.Channel',
        verbose_name=_(u"Channel"),
    )
class DynamicBox(BaseBox):
    """Box whose content is supplied by a stored :class:`QuerySet`."""
    # The dynamic queryset that provides this box's objects.
    dynamicqueryset = models.ForeignKey(
        'boxes.QuerySet',
        verbose_name=_(u'Query Set')
    )
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
Add field channel on QuerySet boxes#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
channel = models.ForeignKey(
'channels.Channel',
verbose_name=_(u"Channel"),
)
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
<commit_msg>Add field channel on QuerySet boxes<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
channel = models.ForeignKey(
'channels.Channel',
verbose_name=_(u"Channel"),
)
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
66604e749349e37eb1e59168d00f52ed7da23029
|
dragonflow/db/neutron/models.py
|
dragonflow/db/neutron/models.py
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=sa.func.utc_timestamp())
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import expression
from sqlalchemy import types
class utc_timestamp(expression.FunctionElement):
type = types.DateTime()
@compiles(utc_timestamp, 'postgresql')
def pg_utc_timestamp(element, compiler, **kw):
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utc_timestamp, 'mssql')
def ms_utc_timestamp(element, compiler, **kw):
return "GETUTCDATE()"
@compiles(utc_timestamp, 'mysql')
def my_utc_timestamp(element, compiler, **kw):
return "UTC_TIMESTAMP()"
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=utc_timestamp())
|
Implement utc_timestamp for PGSQL and MSSQL
|
Implement utc_timeout for PGSQL and MSSQL
UTC_TIMESTAMP is a MySQL specific function. Use SQLAlchemy to implement it
also in MSSQL and PGSQL.
Change-Id: If8673b543da2a89a2bad87daff2429cb09c735aa
Closes-Bug: #1700873
|
Python
|
apache-2.0
|
openstack/dragonflow,openstack/dragonflow,openstack/dragonflow
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=sa.func.utc_timestamp())
Implement utc_timeout for PGSQL and MSSQL
UTC_TIMESTAMP is a MySQL specific function. Use SQLAlchemy to implement it
also in MSSQL and PGSQL.
Change-Id: If8673b543da2a89a2bad87daff2429cb09c735aa
Closes-Bug: #1700873
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import expression
from sqlalchemy import types
class utc_timestamp(expression.FunctionElement):
type = types.DateTime()
@compiles(utc_timestamp, 'postgresql')
def pg_utc_timestamp(element, compiler, **kw):
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utc_timestamp, 'mssql')
def ms_utc_timestamp(element, compiler, **kw):
return "GETUTCDATE()"
@compiles(utc_timestamp, 'mysql')
def my_utc_timestamp(element, compiler, **kw):
return "UTC_TIMESTAMP()"
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=utc_timestamp())
|
<commit_before># Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=sa.func.utc_timestamp())
<commit_msg>Implement utc_timeout for PGSQL and MSSQL
UTC_TIMESTAMP is a MySQL specific function. Use SQLAlchemy to implement it
also in MSSQL and PGSQL.
Change-Id: If8673b543da2a89a2bad87daff2429cb09c735aa
Closes-Bug: #1700873<commit_after>
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import expression
from sqlalchemy import types
class utc_timestamp(expression.FunctionElement):
    """SQL function element that renders a dialect-specific UTC timestamp
    (rendering is provided by the @compiles handlers below)."""
    type = types.DateTime()
@compiles(utc_timestamp, 'postgresql')
def pg_utc_timestamp(element, compiler, **kw):
    """Render utc_timestamp on PostgreSQL: CURRENT_TIMESTAMP converted to UTC."""
    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utc_timestamp, 'mssql')
def ms_utc_timestamp(element, compiler, **kw):
    """Render utc_timestamp on SQL Server via its built-in GETUTCDATE()."""
    return "GETUTCDATE()"
@compiles(utc_timestamp, 'mysql')
def my_utc_timestamp(element, compiler, **kw):
    """Render utc_timestamp on MySQL via its native UTC_TIMESTAMP()."""
    return "UTC_TIMESTAMP()"
class DFLockedObjects(model_base.BASEV2):
    """Table tracking per-object locks used by Dragonflow's Neutron DB."""
    __tablename__ = 'dflockedobjects'
    # UUID of the object this lock row protects.
    object_uuid = sa.Column(sa.String(36), primary_key=True)
    # Whether the object is currently locked.
    lock = sa.Column(sa.Boolean, default=False)
    # Identifier of the session associated with the lock.
    session_id = sa.Column(sa.BigInteger, default=0)
    # Refreshed on every update with a cross-dialect UTC timestamp
    # (see utc_timestamp above).
    created_at = sa.Column(sa.DateTime, onupdate=utc_timestamp())
|
# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=sa.func.utc_timestamp())
Implement utc_timeout for PGSQL and MSSQL
UTC_TIMESTAMP is a MySQL specific function. Use SQLAlchemy to implement it
also in MSSQL and PGSQL.
Change-Id: If8673b543da2a89a2bad87daff2429cb09c735aa
Closes-Bug: #1700873# Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import expression
from sqlalchemy import types
class utc_timestamp(expression.FunctionElement):
type = types.DateTime()
@compiles(utc_timestamp, 'postgresql')
def pg_utc_timestamp(element, compiler, **kw):
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utc_timestamp, 'mssql')
def ms_utc_timestamp(element, compiler, **kw):
return "GETUTCDATE()"
@compiles(utc_timestamp, 'mysql')
def my_utc_timestamp(element, compiler, **kw):
return "UTC_TIMESTAMP()"
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=utc_timestamp())
|
<commit_before># Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=sa.func.utc_timestamp())
<commit_msg>Implement utc_timeout for PGSQL and MSSQL
UTC_TIMESTAMP is a MySQL specific function. Use SQLAlchemy to implement it
also in MSSQL and PGSQL.
Change-Id: If8673b543da2a89a2bad87daff2429cb09c735aa
Closes-Bug: #1700873<commit_after># Copyright (c) 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import expression
from sqlalchemy import types
class utc_timestamp(expression.FunctionElement):
type = types.DateTime()
@compiles(utc_timestamp, 'postgresql')
def pg_utc_timestamp(element, compiler, **kw):
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utc_timestamp, 'mssql')
def ms_utc_timestamp(element, compiler, **kw):
return "GETUTCDATE()"
@compiles(utc_timestamp, 'mysql')
def my_utc_timestamp(element, compiler, **kw):
return "UTC_TIMESTAMP()"
class DFLockedObjects(model_base.BASEV2):
__tablename__ = 'dflockedobjects'
object_uuid = sa.Column(sa.String(36), primary_key=True)
lock = sa.Column(sa.Boolean, default=False)
session_id = sa.Column(sa.BigInteger, default=0)
created_at = sa.Column(sa.DateTime, onupdate=utc_timestamp())
|
d2971af14f57e925e1500da9ede42adb34d0dc62
|
tastycrust/authentication.py
|
tastycrust/authentication.py
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
anonymous_allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.anonymous_allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
allowed_methods = [s.upper() for s in self.anonymous_allowed_methods]
if request.method in allowed_methods:
return True
return False
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
return (request.method in [s.upper() for s in self.allowed_methods])
|
Change some naming in AnonymousAuthentication
|
Change some naming in AnonymousAuthentication
|
Python
|
bsd-3-clause
|
uranusjr/django-tastypie-crust
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
    """Allow unauthenticated requests for a configurable set of HTTP methods.

    By default only GET is permitted; pass ``allowed`` to replace the
    whitelist. Method names are compared case-insensitively.
    """

    anonymous_allowed_methods = ['GET']

    def __init__(self, allowed=None):
        if allowed is not None:
            self.anonymous_allowed_methods = allowed

    def is_authenticated(self, request, **kwargs):
        """Return True when the request's method is in the whitelist."""
        permitted = [name.upper() for name in self.anonymous_allowed_methods]
        return request.method in permitted
Change some naming in AnonymousAuthentication
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
return (request.method in [s.upper() for s in self.allowed_methods])
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
anonymous_allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.anonymous_allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
allowed_methods = [s.upper() for s in self.anonymous_allowed_methods]
if request.method in allowed_methods:
return True
return False
<commit_msg>Change some naming in AnonymousAuthentication<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
    """Grant access without credentials for a whitelist of HTTP methods.

    Defaults to GET only; pass ``allowed`` to override the whitelist.
    Method names are matched case-insensitively.
    """

    allowed_methods = ['GET']

    def __init__(self, allowed=None):
        if allowed is not None:
            self.allowed_methods = allowed

    def is_authenticated(self, request, **kwargs):
        """Return True when the request's HTTP method is whitelisted."""
        permitted = [name.upper() for name in self.allowed_methods]
        return request.method in permitted
|
#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
anonymous_allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.anonymous_allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
allowed_methods = [s.upper() for s in self.anonymous_allowed_methods]
if request.method in allowed_methods:
return True
return False
Change some naming in AnonymousAuthentication#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
return (request.method in [s.upper() for s in self.allowed_methods])
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
anonymous_allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.anonymous_allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
allowed_methods = [s.upper() for s in self.anonymous_allowed_methods]
if request.method in allowed_methods:
return True
return False
<commit_msg>Change some naming in AnonymousAuthentication<commit_after>#!/usr/bin/env python
# -*- coding: utf-8
class AnonymousAuthentication(object):
allowed_methods = ['GET']
def __init__(self, allowed=None):
if allowed is not None:
self.allowed_methods = allowed
def is_authenticated(self, request, **kwargs):
return (request.method in [s.upper() for s in self.allowed_methods])
|
fa885b929f8323c88228dbc4d40ca286d49ee286
|
test_project/blog/api.py
|
test_project/blog/api.py
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment, SmartTag
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
class SmartTagResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
class Meta:
queryset = SmartTag.objects.all()
resource_name = 'smart-tag'
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
api.register(SmartTagResource())
|
Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.
|
Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.
|
Python
|
bsd-3-clause
|
juanique/django-chocolate,juanique/django-chocolate,juanique/django-chocolate
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment, SmartTag
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
class SmartTagResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
class Meta:
queryset = SmartTag.objects.all()
resource_name = 'smart-tag'
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
api.register(SmartTagResource())
|
<commit_before>from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
<commit_msg>Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.<commit_after>
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment, SmartTag
class EntryResource(ModelResource):
    """REST resource exposing all blog entries."""
    class Meta:
        queryset = Entry.objects.all()
class CommentResource(ModelResource):
    """REST resource for comments, each linked back to its entry."""
    # Parent entry of this comment.
    entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
    # Upvote count; exposed read-only through the API.
    upvotes = fields.IntegerField(readonly=True)
    class Meta:
        queryset = Comment.objects.all()
class SmartTagResource(ModelResource):
    """REST resource for smart tags attached to an entry."""
    # Entry this smart tag belongs to.
    entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
    class Meta:
        queryset = SmartTag.objects.all()
        # Explicit resource_name so the URL segment (and TastyFactory key)
        # is 'smart-tag' rather than the derived default.
        resource_name = 'smart-tag'
# Register every resource under API version "v1".
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
api.register(SmartTagResource())
|
from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment, SmartTag
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
class SmartTagResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
class Meta:
queryset = SmartTag.objects.all()
resource_name = 'smart-tag'
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
api.register(SmartTagResource())
|
<commit_before>from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
<commit_msg>Create corresponding SmartTag resource with explicitly defined 'resource_name' attribute that will be used for its TastyFactory key.<commit_after>from tastypie.resources import ModelResource
from tastypie.api import Api
from tastypie import fields
from models import Entry, Comment, SmartTag
class EntryResource(ModelResource):
class Meta:
queryset = Entry.objects.all()
class CommentResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
upvotes = fields.IntegerField(readonly=True)
class Meta:
queryset = Comment.objects.all()
class SmartTagResource(ModelResource):
entry = fields.ForeignKey("blog.api.EntryResource", attribute="entry")
class Meta:
queryset = SmartTag.objects.all()
resource_name = 'smart-tag'
api = Api(api_name="v1")
api.register(EntryResource())
api.register(CommentResource())
api.register(SmartTagResource())
|
185dcb9db26bd3dc5f76faebb4d56c7abb87f87f
|
test/parseJaguar.py
|
test/parseJaguar.py
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
Test the parsing of all of the uploaded Jaguar files
|
Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27
|
Python
|
lgpl-2.1
|
Clyde-fare/cclib_bak,Clyde-fare/cclib_bak
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
<commit_before>import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
<commit_msg>Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27<commit_after>
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
<commit_before>import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
<commit_msg>Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27<commit_after>import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
f5d87a37ece8708735591f0d26213a6b7fd1a191
|
etc/ci/check_dynamic_symbols.py
|
etc/ci/check_dynamic_symbols.py
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = "\n".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary:\n{0}".format(human_readable_difference))
sys.exit(-1)
|
Put each unexpected dynamic symbols on its own line
|
Put each unexpected dynamic symbols on its own line
|
Python
|
mpl-2.0
|
thiagopnts/servo,DominoTree/servo,peterjoel/servo,anthgur/servo,larsbergstrom/servo,nnethercote/servo,thiagopnts/servo,szeged/servo,paulrouget/servo,thiagopnts/servo,pyfisch/servo,pyfisch/servo,KiChjang/servo,CJ8664/servo,thiagopnts/servo,pyfisch/servo,fiji-flo/servo,notriddle/servo,ConnorGBrewster/servo,sadmansk/servo,avadacatavra/servo,nnethercote/servo,nnethercote/servo,splav/servo,dsandeephegde/servo,emilio/servo,rnestler/servo,canaltinova/servo,anthgur/servo,upsuper/servo,emilio/servo,emilio/servo,nnethercote/servo,eddyb/servo,sadmansk/servo,larsbergstrom/servo,emilio/servo,KiChjang/servo,jimberlage/servo,szeged/servo,fiji-flo/servo,szeged/servo,saneyuki/servo,notriddle/servo,larsbergstrom/servo,canaltinova/servo,splav/servo,avadacatavra/servo,mbrubeck/servo,fiji-flo/servo,CJ8664/servo,avadacatavra/servo,anthgur/servo,larsbergstrom/servo,ConnorGBrewster/servo,mbrubeck/servo,nrc/servo,avadacatavra/servo,jimberlage/servo,ConnorGBrewster/servo,fiji-flo/servo,emilio/servo,anthgur/servo,dsandeephegde/servo,avadacatavra/servo,SimonSapin/servo,mattnenterprise/servo,DominoTree/servo,cbrewster/servo,dsandeephegde/servo,canaltinova/servo,thiagopnts/servo,splav/servo,DominoTree/servo,ConnorGBrewster/servo,dati91/servo,mattnenterprise/servo,fiji-flo/servo,sadmansk/servo,DominoTree/servo,paulrouget/servo,CJ8664/servo,peterjoel/servo,nrc/servo,eddyb/servo,mattnenterprise/servo,paulrouget/servo,mattnenterprise/servo,DominoTree/servo,notriddle/servo,thiagopnts/servo,splav/servo,larsbergstrom/servo,cbrewster/servo,cbrewster/servo,canaltinova/servo,anthgur/servo,peterjoel/servo,szeged/servo,mbrubeck/servo,CJ8664/servo,cbrewster/servo,KiChjang/servo,anthgur/servo,pyfisch/servo,paulrouget/servo,KiChjang/servo,eddyb/servo,mbrubeck/servo,szeged/servo,dati91/servo,saneyuki/servo,nrc/servo,mbrubeck/servo,thiagopnts/servo,peterjoel/servo,ConnorGBrewster/servo,eddyb/servo,rnestler/servo,DominoTree/servo,dati91/servo,emilio/servo,cbrewster/servo,upsuper/servo,mattnenterprise/servo,mattne
nterprise/servo,dsandeephegde/servo,larsbergstrom/servo,sadmansk/servo,dsandeephegde/servo,eddyb/servo,jimberlage/servo,anthgur/servo,dsandeephegde/servo,avadacatavra/servo,saneyuki/servo,saneyuki/servo,splav/servo,KiChjang/servo,notriddle/servo,larsbergstrom/servo,emilio/servo,nrc/servo,peterjoel/servo,rnestler/servo,fiji-flo/servo,DominoTree/servo,mattnenterprise/servo,SimonSapin/servo,szeged/servo,fiji-flo/servo,saneyuki/servo,notriddle/servo,saneyuki/servo,larsbergstrom/servo,cbrewster/servo,nnethercote/servo,DominoTree/servo,rnestler/servo,pyfisch/servo,fiji-flo/servo,notriddle/servo,nnethercote/servo,rnestler/servo,nrc/servo,dati91/servo,saneyuki/servo,jimberlage/servo,KiChjang/servo,rnestler/servo,ConnorGBrewster/servo,ConnorGBrewster/servo,eddyb/servo,szeged/servo,jimberlage/servo,jimberlage/servo,paulrouget/servo,CJ8664/servo,eddyb/servo,peterjoel/servo,saneyuki/servo,splav/servo,notriddle/servo,SimonSapin/servo,pyfisch/servo,mbrubeck/servo,szeged/servo,splav/servo,anthgur/servo,emilio/servo,emilio/servo,nnethercote/servo,paulrouget/servo,DominoTree/servo,pyfisch/servo,szeged/servo,dsandeephegde/servo,larsbergstrom/servo,mattnenterprise/servo,pyfisch/servo,avadacatavra/servo,splav/servo,peterjoel/servo,larsbergstrom/servo,SimonSapin/servo,CJ8664/servo,jimberlage/servo,KiChjang/servo,szeged/servo,upsuper/servo,mbrubeck/servo,nnethercote/servo,nrc/servo,paulrouget/servo,splav/servo,dati91/servo,upsuper/servo,peterjoel/servo,sadmansk/servo,upsuper/servo,paulrouget/servo,upsuper/servo,splav/servo,rnestler/servo,pyfisch/servo,notriddle/servo,peterjoel/servo,emilio/servo,avadacatavra/servo,nrc/servo,SimonSapin/servo,dati91/servo,mbrubeck/servo,SimonSapin/servo,nnethercote/servo,notriddle/servo,SimonSapin/servo,saneyuki/servo,nnethercote/servo,dati91/servo,DominoTree/servo,sadmansk/servo,canaltinova/servo,jimberlage/servo,upsuper/servo,jimberlage/servo,canaltinova/servo,upsuper/servo,paulrouget/servo,dati91/servo,SimonSapin/servo,sadmansk/servo,paulrouget/servo,pe
terjoel/servo,rnestler/servo,saneyuki/servo,KiChjang/servo,sadmansk/servo,KiChjang/servo,eddyb/servo,CJ8664/servo,ConnorGBrewster/servo,cbrewster/servo,notriddle/servo,cbrewster/servo,thiagopnts/servo,CJ8664/servo,pyfisch/servo,KiChjang/servo,dsandeephegde/servo,canaltinova/servo,canaltinova/servo
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
Put each unexpected dynamic symbols on its own line
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = "\n".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary:\n{0}".format(human_readable_difference))
sys.exit(-1)
|
<commit_before># Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
<commit_msg>Put each unexpected dynamic symbols on its own line<commit_after>
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = "\n".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary:\n{0}".format(human_readable_difference))
sys.exit(-1)
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
Put each unexpected dynamic symbols on its own line# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = "\n".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary:\n{0}".format(human_readable_difference))
sys.exit(-1)
|
<commit_before># Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
<commit_msg>Put each unexpected dynamic symbols on its own line<commit_after># Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = "\n".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary:\n{0}".format(human_readable_difference))
sys.exit(-1)
|
bad670aebbdeeb029a40762aae80eec1100268a2
|
data_log/management/commands/generate_report_fixture.py
|
data_log/management/commands/generate_report_fixture.py
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.WARNING(models.Report.__name__))
data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
pks = []
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
serialized_data = json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
pks += [d['pk'] for d in serialized_data]
data += serialized_data
self.stdout.write(self.style.WARNING(models.Report.__name__))
reports = json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.HTTP_INFO("Finishing special reports..."))
reports += json.loads(j.serialize(models.Report.objects.filter(pk__in=pks)))
data = reports + data
self.stdout.write(self.style.HTTP_INFO('Saving fixtures to file'))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
Fix data log fixture foreign keys
|
Fix data log fixture foreign keys
|
Python
|
apache-2.0
|
porksmash/swarfarm,porksmash/swarfarm,porksmash/swarfarm,porksmash/swarfarm
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.WARNING(models.Report.__name__))
data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
Fix data log fixture foreign keys
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
pks = []
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
serialized_data = json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
pks += [d['pk'] for d in serialized_data]
data += serialized_data
self.stdout.write(self.style.WARNING(models.Report.__name__))
reports = json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.HTTP_INFO("Finishing special reports..."))
reports += json.loads(j.serialize(models.Report.objects.filter(pk__in=pks)))
data = reports + data
self.stdout.write(self.style.HTTP_INFO('Saving fixtures to file'))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
<commit_before>from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.WARNING(models.Report.__name__))
data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
<commit_msg>Fix data log fixture foreign keys<commit_after>
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
pks = []
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
serialized_data = json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
pks += [d['pk'] for d in serialized_data]
data += serialized_data
self.stdout.write(self.style.WARNING(models.Report.__name__))
reports = json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.HTTP_INFO("Finishing special reports..."))
reports += json.loads(j.serialize(models.Report.objects.filter(pk__in=pks)))
data = reports + data
self.stdout.write(self.style.HTTP_INFO('Saving fixtures to file'))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.WARNING(models.Report.__name__))
data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
Fix data log fixture foreign keysfrom django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
pks = []
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
serialized_data = json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
pks += [d['pk'] for d in serialized_data]
data += serialized_data
self.stdout.write(self.style.WARNING(models.Report.__name__))
reports = json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.HTTP_INFO("Finishing special reports..."))
reports += json.loads(j.serialize(models.Report.objects.filter(pk__in=pks)))
data = reports + data
self.stdout.write(self.style.HTTP_INFO('Saving fixtures to file'))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
<commit_before>from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.WARNING(models.Report.__name__))
data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
<commit_msg>Fix data log fixture foreign keys<commit_after>from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
help = 'Create Data Log Report fixtures'
def handle(self, *args, **kwargs):
self.stdout.write(self.style.HTTP_INFO('Creating fixtures for Data Log Reports...'))
JSONSerializer = serializers.get_serializer("json")
j = JSONSerializer()
data = []
models_to_serialize = [
models.LevelReport,
models.SummonReport,
models.MagicShopRefreshReport,
models.MagicBoxCraftingReport,
models.WishReport,
models.RuneCraftingReport
]
pks = []
for model in models_to_serialize:
self.stdout.write(self.style.WARNING(model.__name__))
serialized_data = json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
pks += [d['pk'] for d in serialized_data]
data += serialized_data
self.stdout.write(self.style.WARNING(models.Report.__name__))
reports = json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:100]))
self.stdout.write(self.style.HTTP_INFO("Finishing special reports..."))
reports += json.loads(j.serialize(models.Report.objects.filter(pk__in=pks)))
data = reports + data
self.stdout.write(self.style.HTTP_INFO('Saving fixtures to file'))
with open("fixture_reports.json", "w+") as f:
json.dump(data, f)
self.stdout.write(self.style.SUCCESS('Done!'))
|
6309031090a135856e6e2b3f8381202d6d17b72f
|
test_app/signals.py
|
test_app/signals.py
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
send_to_hipchat(event.render())
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
# if a template exists for the event_type, then send the output
# as a normal notification, in 'yellow'
# if no template exists, send a notification in 'red'
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
rendered = event.render()
color = "yellow" if rendered else "red"
html = rendered or (
u"No template available for '%s'"
% event.event_type
)
send_to_hipchat(html, color=color)
|
Send unknown events to HipChat in red
|
Send unknown events to HipChat in red
If an event comes in to the test_app and has no matching template,
then send it to HipChat in red, not yellow, making it easier to
manage.
|
Python
|
mit
|
yunojuno/django-trello-webhooks,yunojuno/django-trello-webhooks
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
send_to_hipchat(event.render())
Send unknown events to HipChat in red
If an event comes in to the test_app and has no matching template,
then send it to HipChat in red, not yellow, making it easier to
manage.
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
# if a template exists for the event_type, then send the output
# as a normal notification, in 'yellow'
# if no template exists, send a notification in 'red'
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
rendered = event.render()
color = "yellow" if rendered else "red"
html = rendered or (
u"No template available for '%s'"
% event.event_type
)
send_to_hipchat(html, color=color)
|
<commit_before># # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
send_to_hipchat(event.render())
<commit_msg>Send unknown events to HipChat in red
If an event comes in to the test_app and has no matching template,
then send it to HipChat in red, not yellow, making it easier to
manage.<commit_after>
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
# if a template exists for the event_type, then send the output
# as a normal notification, in 'yellow'
# if no template exists, send a notification in 'red'
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
rendered = event.render()
color = "yellow" if rendered else "red"
html = rendered or (
u"No template available for '%s'"
% event.event_type
)
send_to_hipchat(html, color=color)
|
# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
send_to_hipchat(event.render())
Send unknown events to HipChat in red
If an event comes in to the test_app and has no matching template,
then send it to HipChat in red, not yellow, making it easier to
manage.# # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
# if a template exists for the event_type, then send the output
# as a normal notification, in 'yellow'
# if no template exists, send a notification in 'red'
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
rendered = event.render()
color = "yellow" if rendered else "red"
html = rendered or (
u"No template available for '%s'"
% event.event_type
)
send_to_hipchat(html, color=color)
|
<commit_before># # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
send_to_hipchat(event.render())
<commit_msg>Send unknown events to HipChat in red
If an event comes in to the test_app and has no matching template,
then send it to HipChat in red, not yellow, making it easier to
manage.<commit_after># # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.dispatch import receiver
from trello_webhooks.signals import callback_received
from test_app.hipchat import send_to_hipchat
logger = logging.getLogger(__name__)
@receiver(callback_received, dispatch_uid="callback_received")
def on_callback_received(sender, **kwargs):
# if a template exists for the event_type, then send the output
# as a normal notification, in 'yellow'
# if no template exists, send a notification in 'red'
event = kwargs.pop('event')
if settings.HIPCHAT_ENABLED:
rendered = event.render()
color = "yellow" if rendered else "red"
html = rendered or (
u"No template available for '%s'"
% event.event_type
)
send_to_hipchat(html, color=color)
|
40f140682a902957d5875c8afc88e16bc327367f
|
tests/test_cat2cohort.py
|
tests/test_cat2cohort.py
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
Move unit tests data in setUp
|
Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism.
|
Python
|
mit
|
danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism.
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
<commit_before>"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
<commit_msg>Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism.<commit_after>
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism."""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
<commit_before>"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
<commit_msg>Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism.<commit_after>"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
b52f0e9fe2c9e41205a8d703985ac39ab3524a8a
|
tests/blueprints/test_entity.py
|
tests/blueprints/test_entity.py
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.user = Entity(name="testuser")
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head("/testuser/")
self.assertIn("/testuser/profile", r.headers['Link'])
def test_entity_profile_json (self):
r = self.client.get("/testuser/profile")
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.name = 'testuser'
self.user = Entity(name=self.name)
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head('/' + self.name)
self.assertIn('/' + self.name + 'profile', r.headers['Link'])
def test_entity_link_404 (self):
self.assertStatus(self.client.head('/non-existent-user'), 404)
def test_entity_profile_404 (self):
self.assertStatus(self.client.head('/non-existent-user/profile'), 404)
def test_entity_profile_json (self):
r = self.client.get('/testuser/profile')
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
Test that the api 404's when a user does not exist
|
Test that the api 404's when a user does not exist
|
Python
|
apache-2.0
|
pytent/pytentd
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.user = Entity(name="testuser")
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head("/testuser/")
self.assertIn("/testuser/profile", r.headers['Link'])
def test_entity_profile_json (self):
r = self.client.get("/testuser/profile")
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
Test that the api 404's when a user does not exist
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.name = 'testuser'
self.user = Entity(name=self.name)
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head('/' + self.name)
self.assertIn('/' + self.name + 'profile', r.headers['Link'])
def test_entity_link_404 (self):
self.assertStatus(self.client.head('/non-existent-user'), 404)
def test_entity_profile_404 (self):
self.assertStatus(self.client.head('/non-existent-user/profile'), 404)
def test_entity_profile_json (self):
r = self.client.get('/testuser/profile')
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
<commit_before>from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.user = Entity(name="testuser")
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head("/testuser/")
self.assertIn("/testuser/profile", r.headers['Link'])
def test_entity_profile_json (self):
r = self.client.get("/testuser/profile")
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
<commit_msg>Test that the api 404's when a user does not exist<commit_after>
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.name = 'testuser'
self.user = Entity(name=self.name)
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head('/' + self.name)
self.assertIn('/' + self.name + 'profile', r.headers['Link'])
def test_entity_link_404 (self):
self.assertStatus(self.client.head('/non-existent-user'), 404)
def test_entity_profile_404 (self):
self.assertStatus(self.client.head('/non-existent-user/profile'), 404)
def test_entity_profile_json (self):
r = self.client.get('/testuser/profile')
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.user = Entity(name="testuser")
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head("/testuser/")
self.assertIn("/testuser/profile", r.headers['Link'])
def test_entity_profile_json (self):
r = self.client.get("/testuser/profile")
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
Test that the api 404's when a user does not existfrom json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.name = 'testuser'
self.user = Entity(name=self.name)
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head('/' + self.name)
self.assertIn('/' + self.name + 'profile', r.headers['Link'])
def test_entity_link_404 (self):
self.assertStatus(self.client.head('/non-existent-user'), 404)
def test_entity_profile_404 (self):
self.assertStatus(self.client.head('/non-existent-user/profile'), 404)
def test_entity_profile_json (self):
r = self.client.get('/testuser/profile')
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
<commit_before>from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.user = Entity(name="testuser")
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head("/testuser/")
self.assertIn("/testuser/profile", r.headers['Link'])
def test_entity_profile_json (self):
r = self.client.get("/testuser/profile")
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
<commit_msg>Test that the api 404's when a user does not exist<commit_after>from json import loads
from tests import AppTestCase, main
from tentd import db
from tentd.models.entity import Entity
class EntityBlueprintTest (AppTestCase):
def setUp (self):
super(EntityBlueprintTest, self).setUp()
self.name = 'testuser'
self.user = Entity(name=self.name)
db.session.add(self.user)
db.session.commit()
def test_entity_link (self):
r = self.client.head('/' + self.name)
self.assertIn('/' + self.name + 'profile', r.headers['Link'])
def test_entity_link_404 (self):
self.assertStatus(self.client.head('/non-existent-user'), 404)
def test_entity_profile_404 (self):
self.assertStatus(self.client.head('/non-existent-user/profile'), 404)
def test_entity_profile_json (self):
r = self.client.get('/testuser/profile')
self.assertEquals(r.mimetype, 'application/json')
self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json)
if __name__ == "__main__":
main()
|
8b0dcf1bfda26ab9463d2c5a892b7ffd3fa015d9
|
packs/github/actions/lib/formatters.py
|
packs/github/actions/lib/formatters.py
|
__all__ = [
'issue_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
result['labels'] = issue.labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
|
__all__ = [
'issue_to_dict',
'label_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
if issue.labels:
labels = [label_to_dict(label) for label in issue.labels]
else:
labels = []
result['labels'] = labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
def label_to_dict(label):
result = {}
result['name'] = label.name
result['color'] = label.color
result['url'] = label.url
return result
|
Make sure we flatten the labels attribute to a serializable simple type.
|
Make sure we flatten the labels attribute to a serializable simple type.
|
Python
|
apache-2.0
|
pearsontechnology/st2contrib,pidah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,psychopenguin/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,pidah/st2contrib,armab/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,armab/st2contrib
|
__all__ = [
'issue_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
result['labels'] = issue.labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
Make sure we flatten the labels attribute to a serializable simple type.
|
__all__ = [
'issue_to_dict',
'label_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
if issue.labels:
labels = [label_to_dict(label) for label in issue.labels]
else:
labels = []
result['labels'] = labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
def label_to_dict(label):
result = {}
result['name'] = label.name
result['color'] = label.color
result['url'] = label.url
return result
|
<commit_before>__all__ = [
'issue_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
result['labels'] = issue.labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
<commit_msg>Make sure we flatten the labels attribute to a serializable simple type.<commit_after>
|
__all__ = [
'issue_to_dict',
'label_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
if issue.labels:
labels = [label_to_dict(label) for label in issue.labels]
else:
labels = []
result['labels'] = labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
def label_to_dict(label):
result = {}
result['name'] = label.name
result['color'] = label.color
result['url'] = label.url
return result
|
__all__ = [
'issue_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
result['labels'] = issue.labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
Make sure we flatten the labels attribute to a serializable simple type.__all__ = [
'issue_to_dict',
'label_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
if issue.labels:
labels = [label_to_dict(label) for label in issue.labels]
else:
labels = []
result['labels'] = labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
def label_to_dict(label):
result = {}
result['name'] = label.name
result['color'] = label.color
result['url'] = label.url
return result
|
<commit_before>__all__ = [
'issue_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
result['labels'] = issue.labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
<commit_msg>Make sure we flatten the labels attribute to a serializable simple type.<commit_after>__all__ = [
'issue_to_dict',
'label_to_dict'
]
def issue_to_dict(issue):
result = {}
if issue.closed_by:
closed_by = issue.closed_by.name
else:
closed_by = None
result['id'] = issue.id
result['repository'] = issue.repository.name
result['title'] = issue.title
result['body'] = issue.body
result['url'] = issue.html_url
result['state'] = issue.state
if issue.labels:
labels = [label_to_dict(label) for label in issue.labels]
else:
labels = []
result['labels'] = labels
result['created_at'] = issue.created_at
result['closed_at'] = issue.closed_at
result['closed_by'] = closed_by
return result
def label_to_dict(label):
result = {}
result['name'] = label.name
result['color'] = label.color
result['url'] = label.url
return result
|
2d841bd7dcd7a7b564d8749b7faa9c9634f0dc55
|
tests/lib/yaml/exceptions.py
|
tests/lib/yaml/exceptions.py
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
def __str__(self):
# Format a reason
if not self.args:
message = "unknown"
elif len(self.args) == 1:
message = self.args[0]
else:
try:
message = self.args[0].format(*self.args[1:])
except Exception as error:
message = "Unable to format message: {}\n{}".format(
self.args, error
)
# Print the reason
return "YAML is malformed -- reason: {}".format(message)
|
Format Error message in the exception
|
Format Error message in the exception
|
Python
|
mit
|
pradyunsg/zazo,pradyunsg/zazo
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
Format Error message in the exception
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
def __str__(self):
# Format a reason
if not self.args:
message = "unknown"
elif len(self.args) == 1:
message = self.args[0]
else:
try:
message = self.args[0].format(*self.args[1:])
except Exception as error:
message = "Unable to format message: {}\n{}".format(
self.args, error
)
# Print the reason
return "YAML is malformed -- reason: {}".format(message)
|
<commit_before>class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
<commit_msg>Format Error message in the exception<commit_after>
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
def __str__(self):
# Format a reason
if not self.args:
message = "unknown"
elif len(self.args) == 1:
message = self.args[0]
else:
try:
message = self.args[0].format(*self.args[1:])
except Exception as error:
message = "Unable to format message: {}\n{}".format(
self.args, error
)
# Print the reason
return "YAML is malformed -- reason: {}".format(message)
|
class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
Format Error message in the exceptionclass YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
def __str__(self):
# Format a reason
if not self.args:
message = "unknown"
elif len(self.args) == 1:
message = self.args[0]
else:
try:
message = self.args[0].format(*self.args[1:])
except Exception as error:
message = "Unable to format message: {}\n{}".format(
self.args, error
)
# Print the reason
return "YAML is malformed -- reason: {}".format(message)
|
<commit_before>class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
<commit_msg>Format Error message in the exception<commit_after>class YAMLException(Exception):
"""Base for the exception hierarchy of this module
"""
def __str__(self):
# Format a reason
if not self.args:
message = "unknown"
elif len(self.args) == 1:
message = self.args[0]
else:
try:
message = self.args[0].format(*self.args[1:])
except Exception as error:
message = "Unable to format message: {}\n{}".format(
self.args, error
)
# Print the reason
return "YAML is malformed -- reason: {}".format(message)
|
2421212be1072db1428e7c832c0818a3928c1153
|
tests/test_collection_crs.py
|
tests/test_collection_crs.py
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
def test_collection_create_crs_wkt(tmpdir):
"""A collection can be created using crs_wkt"""
filename = str(tmpdir.join("test.shp"))
wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst:
assert dst.crs_wkt == wkt
with fiona.open(filename) as col:
assert col.crs_wkt.startswith('GEOGCS["WGS 84')
|
Add test for collection creation with crs_wkt
|
Add test for collection creation with crs_wkt
|
Python
|
bsd-3-clause
|
Toblerity/Fiona,Toblerity/Fiona,rbuffat/Fiona,rbuffat/Fiona
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
Add test for collection creation with crs_wkt
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
def test_collection_create_crs_wkt(tmpdir):
"""A collection can be created using crs_wkt"""
filename = str(tmpdir.join("test.shp"))
wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst:
assert dst.crs_wkt == wkt
with fiona.open(filename) as col:
assert col.crs_wkt.startswith('GEOGCS["WGS 84')
|
<commit_before>import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
<commit_msg>Add test for collection creation with crs_wkt<commit_after>
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
def test_collection_create_crs_wkt(tmpdir):
"""A collection can be created using crs_wkt"""
filename = str(tmpdir.join("test.shp"))
wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst:
assert dst.crs_wkt == wkt
with fiona.open(filename) as col:
assert col.crs_wkt.startswith('GEOGCS["WGS 84')
|
import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
Add test for collection creation with crs_wktimport os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
def test_collection_create_crs_wkt(tmpdir):
"""A collection can be created using crs_wkt"""
filename = str(tmpdir.join("test.shp"))
wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst:
assert dst.crs_wkt == wkt
with fiona.open(filename) as col:
assert col.crs_wkt.startswith('GEOGCS["WGS 84')
|
<commit_before>import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
<commit_msg>Add test for collection creation with crs_wkt<commit_after>import os
import re
import fiona
import fiona.crs
from .conftest import WGS84PATTERN
def test_collection_crs_wkt(path_coutwildrnp_shp):
with fiona.open(path_coutwildrnp_shp) as src:
assert re.match(WGS84PATTERN, src.crs_wkt)
def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp):
"""crs members of a dataset with no crs can be accessed safely."""
filename = str(tmpdir.join("test.shp"))
with fiona.open(path_coutwildrnp_shp) as src:
profile = src.meta
del profile['crs']
del profile['crs_wkt']
with fiona.open(filename, 'w', **profile) as dst:
assert dst.crs_wkt == ""
assert dst.crs == {}
def test_collection_create_crs_wkt(tmpdir):
"""A collection can be created using crs_wkt"""
filename = str(tmpdir.join("test.shp"))
wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst:
assert dst.crs_wkt == wkt
with fiona.open(filename) as col:
assert col.crs_wkt.startswith('GEOGCS["WGS 84')
|
86a8034101c27ffd9daf15b6cd884c6b511feecc
|
python/protein-translation/protein_translation.py
|
python/protein-translation/protein_translation.py
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UAU": "Tyrosine",
"UAC": "Tyrosine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
Fix mapping for codon keys for Tyrosine
|
Fix mapping for codon keys for Tyrosine
|
Python
|
mit
|
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
Fix mapping for codon keys for Tyrosine
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UAU": "Tyrosine",
"UAC": "Tyrosine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
<commit_before># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
<commit_msg>Fix mapping for codon keys for Tyrosine<commit_after>
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UAU": "Tyrosine",
"UAC": "Tyrosine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
Fix mapping for codon keys for Tyrosine# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UAU": "Tyrosine",
"UAC": "Tyrosine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
<commit_before># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
<commit_msg>Fix mapping for codon keys for Tyrosine<commit_after># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UAU": "Tyrosine",
"UAC": "Tyrosine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
2e07218fe864104d31c5c5df285e3a97f2dbfe4f
|
randomizer/tests.py
|
randomizer/tests.py
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
def test_no_restaurants_bug(self):
"""tests a bug occuring when no record exits in the restaurants DB"""
response = self.client.get('/')
self.assertTrue('restaurant' not in response.context)
|
Add a test for the bug occuring when no restaurants exist
|
Add a test for the bug occuring when no restaurants exist
|
Python
|
isc
|
PyJAX/foodie,PyJAX/foodie
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
Add a test for the bug occuring when no restaurants exist
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
def test_no_restaurants_bug(self):
"""tests a bug occuring when no record exits in the restaurants DB"""
response = self.client.get('/')
self.assertTrue('restaurant' not in response.context)
|
<commit_before>from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
<commit_msg>Add a test for the bug occuring when no restaurants exist<commit_after>
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
def test_no_restaurants_bug(self):
"""tests a bug occuring when no record exits in the restaurants DB"""
response = self.client.get('/')
self.assertTrue('restaurant' not in response.context)
|
from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
Add a test for the bug occuring when no restaurants existfrom django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
def test_no_restaurants_bug(self):
"""tests a bug occuring when no record exits in the restaurants DB"""
response = self.client.get('/')
self.assertTrue('restaurant' not in response.context)
|
<commit_before>from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
<commit_msg>Add a test for the bug occuring when no restaurants exist<commit_after>from django.test import TestCase
from randomizer.models import Restaurant
class RandomizerTest(TestCase):
"""tests for the randomizer"""
def test_homepage(self):
"""tests the homepage"""
restaurant1 = Restaurant.objects.create(name='1')
restaurant2 = Restaurant.objects.create(name='2')
response = self.client.get('/')
self.assertIn(response.context['restaurant'],
[restaurant1, restaurant2])
def test_deleted_restaurant_bug(self):
"""tests a bug that occurs when you delete a restaurant from the DB"""
restaurant1 = Restaurant.objects.create(name='A terrible place!')
restaurant2 = Restaurant.objects.create(name='Foodie Heaven')
restaurant1.delete()
response = self.client.get('/')
self.assertEqual(response.context['restaurant'], restaurant2)
def test_no_restaurants_bug(self):
"""tests a bug occuring when no record exits in the restaurants DB"""
response = self.client.get('/')
self.assertTrue('restaurant' not in response.context)
|
1fe53ccce2aa9227bcb2b8f8cdfa576924d81fbd
|
range_hits_board.py
|
range_hits_board.py
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
rc_counts = [0] * 10
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
rc_counts[rc] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts))
for i in range(1,10):
n = rc_counts[i]
rc_str = pad_to(15, e.class_to_string(i))
print rc_str, n, '\t', round(n / denom * 100, 2)
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
basic_keys = []
rc_counts = {}
for i in range(1,10):
s = e.class_to_string(i)
basic_keys.append(s)
rc_counts[s] = 0
## Two input vars:
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
s = e.class_to_string(rc)
rc_counts[s] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts.values()))
for s in basic_keys:
n = rc_counts[s]
print pad_to(15, s), n, '\t', round(n / denom * 100, 2)
|
Change rc_counts to a dict instead of list.
|
Change rc_counts to a dict instead of list.
|
Python
|
mit
|
zimolzak/poker-experiments,zimolzak/poker-experiments,zimolzak/poker-experiments
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
rc_counts = [0] * 10
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
rc_counts[rc] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts))
for i in range(1,10):
n = rc_counts[i]
rc_str = pad_to(15, e.class_to_string(i))
print rc_str, n, '\t', round(n / denom * 100, 2)
Change rc_counts to a dict instead of list.
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
basic_keys = []
rc_counts = {}
for i in range(1,10):
s = e.class_to_string(i)
basic_keys.append(s)
rc_counts[s] = 0
## Two input vars:
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
s = e.class_to_string(rc)
rc_counts[s] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts.values()))
for s in basic_keys:
n = rc_counts[s]
print pad_to(15, s), n, '\t', round(n / denom * 100, 2)
|
<commit_before>from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
rc_counts = [0] * 10
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
rc_counts[rc] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts))
for i in range(1,10):
n = rc_counts[i]
rc_str = pad_to(15, e.class_to_string(i))
print rc_str, n, '\t', round(n / denom * 100, 2)
<commit_msg>Change rc_counts to a dict instead of list.<commit_after>
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
basic_keys = []
rc_counts = {}
for i in range(1,10):
s = e.class_to_string(i)
basic_keys.append(s)
rc_counts[s] = 0
## Two input vars:
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
s = e.class_to_string(rc)
rc_counts[s] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts.values()))
for s in basic_keys:
n = rc_counts[s]
print pad_to(15, s), n, '\t', round(n / denom * 100, 2)
|
from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
rc_counts = [0] * 10
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
rc_counts[rc] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts))
for i in range(1,10):
n = rc_counts[i]
rc_str = pad_to(15, e.class_to_string(i))
print rc_str, n, '\t', round(n / denom * 100, 2)
Change rc_counts to a dict instead of list.from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
basic_keys = []
rc_counts = {}
for i in range(1,10):
s = e.class_to_string(i)
basic_keys.append(s)
rc_counts[s] = 0
## Two input vars:
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
s = e.class_to_string(rc)
rc_counts[s] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts.values()))
for s in basic_keys:
n = rc_counts[s]
print pad_to(15, s), n, '\t', round(n / denom * 100, 2)
|
<commit_before>from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
rc_counts = [0] * 10
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
rc_counts[rc] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts))
for i in range(1,10):
n = rc_counts[i]
rc_str = pad_to(15, e.class_to_string(i))
print rc_str, n, '\t', round(n / denom * 100, 2)
<commit_msg>Change rc_counts to a dict instead of list.<commit_after>from convenience_hole import all_hands_in_range
from convenience import pr
from deuces.deuces import Card, Evaluator
e = Evaluator()
basic_keys = []
rc_counts = {}
for i in range(1,10):
s = e.class_to_string(i)
basic_keys.append(s)
rc_counts[s] = 0
## Two input vars:
board = [Card.new('Qs'), Card.new('Jd'), Card.new('2c')]
range_list = ['AA', 'KK', 'QQ', 'AK', 'AKs']
## tricky ones highlighted:
## 1 2 3 4 5 6 7 8 9
## sf quad boat flush straight trip set 2p overp tp 1.5p mp wp ah nmh
## ^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ ^^^^^^
print "Range:", range_list
print "Board:",
pr(board)
lol = all_hands_in_range(range_list)
for L in lol:
hr = e.evaluate(L, board)
rc = e.get_rank_class(hr)
s = e.class_to_string(rc)
rc_counts[s] += 1
def pad_to(n, s):
while len(s) < n:
s += ' '
return s
print('\nResults\n========')
denom = float(sum(rc_counts.values()))
for s in basic_keys:
n = rc_counts[s]
print pad_to(15, s), n, '\t', round(n / denom * 100, 2)
|
aee872021119686f9efa08b1a2933027da3ae3c0
|
setup.py
|
setup.py
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
Update development status to "beta"
|
Update development status to "beta"
It's live on the website so it can't really be called "pre-alpha" anymore!
|
Python
|
mit
|
jribbens/voting,jribbens/voting
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
Update development status to "beta"
It's live on the website so it can't really be called "pre-alpha" anymore!
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
<commit_before>"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
<commit_msg>Update development status to "beta"
It's live on the website so it can't really be called "pre-alpha" anymore!<commit_after>
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
Update development status to "beta"
It's live on the website so it can't really be called "pre-alpha" anymore!"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
<commit_before>"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
<commit_msg>Update development status to "beta"
It's live on the website so it can't really be called "pre-alpha" anymore!<commit_after>"""voting setuptools information."""
import setuptools
setuptools.setup(
name="voting",
version="0.0.1",
description="UKVoting web systems",
author="Jon Ribbens",
author_email="jon-voting@unequivocal.eu",
url="https://github.com/jribbens/voting",
license="MIT",
py_modules=["voting"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django :: 1.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
],
install_requires=[
"Django>=1.9",
"dnspython3>=1.12",
]
)
|
d1ffc7a842fbe216bc4ef180ada54a016801caab
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
Add Django version trove classifiers
|
Add Django version trove classifiers
|
Python
|
bsd-3-clause
|
brutasse/django-password-reset,brutasse/django-password-reset
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
Add Django version trove classifiers
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
<commit_before># -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
<commit_msg>Add Django version trove classifiers<commit_after>
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
Add Django version trove classifiers# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
<commit_before># -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
<commit_msg>Add Django version trove classifiers<commit_after># -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-password-reset',
version=__import__('password_reset').__version__,
author='Bruno Renie',
author_email='bruno@renie.fr',
packages=find_packages(),
include_package_data=True,
url='https://github.com/brutasse/django-password-reset',
license='BSD licence, see LICENSE file',
description='Class-based views for password reset.',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.8',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
test_suite='runtests.runtests',
zip_safe=False,
)
|
ec5545c81369497304382a132b1143ac21a18b01
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'mock==2.0.0',
'nose==1.3.7',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
license = "Apache-2.0",
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
Drop `mock` and `nose` as package dependencies
|
Drop `mock` and `nose` as package dependencies
That `nose` is used as a test runner (and `mock` is used in those tests)
has nothing to do with the package itself. Rather, these are just
dependencies needed in order to *run tests.*
Note that we're still pinning to very precise version numbers, for no
particularly compelling reason. We'll fix that soon.
(`version` number is not increased, since this commit won't be packaged
& released).
|
Python
|
apache-2.0
|
color/clrsvsim
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'mock==2.0.0',
'nose==1.3.7',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
license = "Apache-2.0",
)
Drop `mock` and `nose` as package dependencies
That `nose` is used as a test runner (and `mock` is used in those tests)
has nothing to do with the package itself. Rather, these are just
dependencies needed in order to *run tests.*
Note that we're still pinning to very precise version numbers, for no
particularly compelling reason. We'll fix that soon.
(`version` number is not increased, since this commit won't be packaged
& released).
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'mock==2.0.0',
'nose==1.3.7',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
license = "Apache-2.0",
)
<commit_msg>Drop `mock` and `nose` as package dependencies
That `nose` is used as a test runner (and `mock` is used in those tests)
has nothing to do with the package itself. Rather, these are just
dependencies needed in order to *run tests.*
Note that we're still pinning to very precise version numbers, for no
particularly compelling reason. We'll fix that soon.
(`version` number is not increased, since this commit won't be packaged
& released).<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'mock==2.0.0',
'nose==1.3.7',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
license = "Apache-2.0",
)
Drop `mock` and `nose` as package dependencies
That `nose` is used as a test runner (and `mock` is used in those tests)
has nothing to do with the package itself. Rather, these are just
dependencies needed in order to *run tests.*
Note that we're still pinning to very precise version numbers, for no
particularly compelling reason. We'll fix that soon.
(`version` number is not increased, since this commit won't be packaged
& released).try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'mock==2.0.0',
'nose==1.3.7',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
license = "Apache-2.0",
)
<commit_msg>Drop `mock` and `nose` as package dependencies
That `nose` is used as a test runner (and `mock` is used in those tests)
has nothing to do with the package itself. Rather, these are just
dependencies needed in order to *run tests.*
Note that we're still pinning to very precise version numbers, for no
particularly compelling reason. We'll fix that soon.
(`version` number is not increased, since this commit won't be packaged
& released).<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
4301049ac2ad9d0c79b5f50fea2055ec2d567019
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
Move to Development Status :: 4 - Beta
|
Move to Development Status :: 4 - Beta
|
Python
|
mit
|
vshymanskyy/blynk-library-python
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
Move to Development Status :: 4 - Beta
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
<commit_msg>Move to Development Status :: 4 - Beta<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
Move to Development Status :: 4 - Beta#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
<commit_msg>Move to Development Status :: 4 - Beta<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
|
1b953be2592d2e9fc68da9e6c5a683ea8dee6b10
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_utl import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
Fix typo in file_util import
|
Fix typo in file_util import
|
Python
|
mit
|
crashlytics/riemann-sumd
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_utl import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')Fix typo in file_util import
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from distutils.file_utl import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')<commit_msg>Fix typo in file_util import<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_utl import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')Fix typo in file_util import#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from distutils.file_utl import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')<commit_msg>Fix typo in file_util import<commit_after>#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
3ed984c402a74d3b3411f3410d932d12b164737b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='jsonate',
version='0.3.2',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.4",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
from setuptools import setup
setup(
name='jsonate',
version='0.4.0',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.7",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
Increment version for django 1.9 support
|
Increment version for django 1.9 support
|
Python
|
mit
|
Rootbuzz/JSONate
|
from setuptools import setup
setup(
name='jsonate',
version='0.3.2',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.4",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
Increment version for django 1.9 support
|
from setuptools import setup
setup(
name='jsonate',
version='0.4.0',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.7",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
<commit_before>from setuptools import setup
setup(
name='jsonate',
version='0.3.2',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.4",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
<commit_msg>Increment version for django 1.9 support<commit_after>
|
from setuptools import setup
setup(
name='jsonate',
version='0.4.0',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.7",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
from setuptools import setup
setup(
name='jsonate',
version='0.3.2',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.4",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
Increment version for django 1.9 supportfrom setuptools import setup
setup(
name='jsonate',
version='0.4.0',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.7",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
<commit_before>from setuptools import setup
setup(
name='jsonate',
version='0.3.2',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.4",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
<commit_msg>Increment version for django 1.9 support<commit_after>from setuptools import setup
setup(
name='jsonate',
version='0.4.0',
author='James Robert',
author_email='jiaaro@gmail.com',
description=('Django library that can make ANYTHING into json'),
long_description=open('README.markdown').read(),
license='MIT',
keywords='django json templatetags',
url='http://jsonate.com',
install_requires=[
"django>=1.7",
],
packages=[
'jsonate',
'jsonate.templatetags',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities'
]
)
|
ecc2a444294bffd8295f7cfe92f9b6612205019d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
Break multiple imports to multiple lines
|
Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76
|
Python
|
apache-2.0
|
stackforge/inception,stackforge/inception
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
<commit_msg>Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
<commit_msg>Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 AT&T. All right reserved.
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
c6f36b517c294d368a7bc75dc359ab32b5917228
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='django-waffle',
version='0.7',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=['waffle', 'waffle.templatetags'],
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup, find_packages
setup(
name='django-waffle',
version='0.7.1',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=find_packages(exclude=['test_app']),
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Switch to find_packages. Bump for PyPI.
|
Switch to find_packages. Bump for PyPI.
|
Python
|
bsd-3-clause
|
ilanbm/django-waffle,JeLoueMonCampingCar/django-waffle,ilanbm/django-waffle,isotoma/django-waffle,ilanbm/django-waffle,groovecoder/django-waffle,rsalmaso/django-waffle,mark-adams/django-waffle,JeLoueMonCampingCar/django-waffle,rodgomes/django-waffle,TwigWorld/django-waffle,webus/django-waffle,paulcwatts/django-waffle,ekohl/django-waffle,rsalmaso/django-waffle,mwaaas/django-waffle-session,paulcwatts/django-waffle,festicket/django-waffle,VladimirFilonov/django-waffle,safarijv/django-waffle,mwaaas/django-waffle-session,isotoma/django-waffle,ilanbm/django-waffle,rlr/django-waffle,engagespark/django-waffle,crccheck/django-waffle,mark-adams/django-waffle,rlr/django-waffle,rlr/django-waffle,crccheck/django-waffle,paulcwatts/django-waffle,11craft/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,rodgomes/django-waffle,styleseat/django-waffle,hwkns/django-waffle,hwkns/django-waffle,ekohl/django-waffle,VladimirFilonov/django-waffle,mark-adams/django-waffle,engagespark/django-waffle,rsalmaso/django-waffle,mwaaas/django-waffle-session,webus/django-waffle,safarijv/django-waffle,TwigWorld/django-waffle,rlr/django-waffle,webus/django-waffle,JeLoueMonCampingCar/django-waffle,festicket/django-waffle,mwaaas/django-waffle-session,styleseat/django-waffle,hwkns/django-waffle,safarijv/django-waffle,mark-adams/django-waffle,styleseat/django-waffle,engagespark/django-waffle,festicket/django-waffle,hwkns/django-waffle,engagespark/django-waffle,willkg/django-waffle,crccheck/django-waffle,rodgomes/django-waffle,webus/django-waffle,11craft/django-waffle,isotoma/django-waffle,groovecoder/django-waffle,JeLoueMonCampingCar/django-waffle,safarijv/django-waffle,crccheck/django-waffle,VladimirFilonov/django-waffle,groovecoder/django-waffle,rodgomes/django-waffle,isotoma/django-waffle,groovecoder/django-waffle,styleseat/django-waffle,festicket/django-waffle,willkg/django-waffle,TwigWorld/django-waffle,rsalmaso/django-waffle
|
from setuptools import setup
setup(
name='django-waffle',
version='0.7',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=['waffle', 'waffle.templatetags'],
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Switch to find_packages. Bump for PyPI.
|
from setuptools import setup, find_packages
setup(
name='django-waffle',
version='0.7.1',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=find_packages(exclude=['test_app']),
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>from setuptools import setup
setup(
name='django-waffle',
version='0.7',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=['waffle', 'waffle.templatetags'],
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Switch to find_packages. Bump for PyPI.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='django-waffle',
version='0.7.1',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=find_packages(exclude=['test_app']),
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup
setup(
name='django-waffle',
version='0.7',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=['waffle', 'waffle.templatetags'],
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Switch to find_packages. Bump for PyPI.from setuptools import setup, find_packages
setup(
name='django-waffle',
version='0.7.1',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=find_packages(exclude=['test_app']),
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>from setuptools import setup
setup(
name='django-waffle',
version='0.7',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=['waffle', 'waffle.templatetags'],
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Switch to find_packages. Bump for PyPI.<commit_after>from setuptools import setup, find_packages
setup(
name='django-waffle',
version='0.7.1',
description='A feature flipper for Django.',
long_description=open('README.rst').read(),
author='James Socol',
author_email='james.socol@gmail.com',
url='http://github.com/jsocol/django-waffle',
license='BSD',
packages=find_packages(exclude=['test_app']),
include_package_data=True,
package_data={'': ['README.rst']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
72ab88b892209249f731242e85603dab691180c2
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
setup(
name='docstash',
version='0.2.2',
description="Store a set of documents and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"PyYAML>=3.11",
"Werkzeug>=0.9.6",
"lockfile>=0.9.1"
],
entry_points={},
tests_require=[]
)
|
from setuptools import setup, find_packages
setup(
name='barn',
version='0.0.1',
description="Store a set of files and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"Werkzeug>=0.9.6",
"lockfile>=0.9.1",
"python-slugify>=0.0.6"
],
entry_points={},
tests_require=[]
)
|
Rename the python package to barn.
|
Rename the python package to barn.
|
Python
|
mit
|
pudo/archivekit
|
import os
from setuptools import setup, find_packages
setup(
name='docstash',
version='0.2.2',
description="Store a set of documents and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"PyYAML>=3.11",
"Werkzeug>=0.9.6",
"lockfile>=0.9.1"
],
entry_points={},
tests_require=[]
)
Rename the python package to barn.
|
from setuptools import setup, find_packages
setup(
name='barn',
version='0.0.1',
description="Store a set of files and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"Werkzeug>=0.9.6",
"lockfile>=0.9.1",
"python-slugify>=0.0.6"
],
entry_points={},
tests_require=[]
)
|
<commit_before>import os
from setuptools import setup, find_packages
setup(
name='docstash',
version='0.2.2',
description="Store a set of documents and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"PyYAML>=3.11",
"Werkzeug>=0.9.6",
"lockfile>=0.9.1"
],
entry_points={},
tests_require=[]
)
<commit_msg>Rename the python package to barn.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='barn',
version='0.0.1',
description="Store a set of files and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"Werkzeug>=0.9.6",
"lockfile>=0.9.1",
"python-slugify>=0.0.6"
],
entry_points={},
tests_require=[]
)
|
import os
from setuptools import setup, find_packages
setup(
name='docstash',
version='0.2.2',
description="Store a set of documents and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"PyYAML>=3.11",
"Werkzeug>=0.9.6",
"lockfile>=0.9.1"
],
entry_points={},
tests_require=[]
)
Rename the python package to barn.from setuptools import setup, find_packages
setup(
name='barn',
version='0.0.1',
description="Store a set of files and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"Werkzeug>=0.9.6",
"lockfile>=0.9.1",
"python-slugify>=0.0.6"
],
entry_points={},
tests_require=[]
)
|
<commit_before>import os
from setuptools import setup, find_packages
setup(
name='docstash',
version='0.2.2',
description="Store a set of documents and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"PyYAML>=3.11",
"Werkzeug>=0.9.6",
"lockfile>=0.9.1"
],
entry_points={},
tests_require=[]
)
<commit_msg>Rename the python package to barn.<commit_after>from setuptools import setup, find_packages
setup(
name='barn',
version='0.0.1',
description="Store a set of files and metadata in an organized way",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='',
author='Friedrich Lindenberg',
author_email='friedrich@pudo.org',
url='http://pudo.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
"Werkzeug>=0.9.6",
"lockfile>=0.9.1",
"python-slugify>=0.0.6"
],
entry_points={},
tests_require=[]
)
|
f93362273afc341ba4b5c458adc5946d8019a992
|
setup.py
|
setup.py
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
Add a few PyPI classifiers.
|
Add a few PyPI classifiers.
|
Python
|
mit
|
jtriley/pystun,b1naryth1ef/pystun
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
Add a few PyPI classifiers.
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
<commit_before>import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
<commit_msg>Add a few PyPI classifiers.<commit_after>
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
Add a few PyPI classifiers.import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
<commit_before>import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
<commit_msg>Add a few PyPI classifiers.<commit_after>import os.path
from setuptools import setup, find_packages
import stun
def main():
src = os.path.realpath(os.path.dirname(__file__))
README = open(os.path.join(src, 'README.rst')).read()
setup(
name='pystun',
version=stun.__version__,
packages=find_packages(),
scripts=['bin/pystun'],
zip_safe=False,
license='MIT',
author='Justin Riley (original author: gaohawk)',
author_email='justin.t.riley@gmail.com',
url="http://github.com/jtriley/pystun",
description="A Python STUN client for getting NAT type and external IP (RFC 3489)",
long_description=README,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
"Topic :: System :: Networking :: Firewalls",
"Programming Language :: Python",
],
)
if __name__ == '__main__':
main()
|
475170acd3119cfa35112f6e470a33dc1f47e5ef
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = find_packages(),
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
from setuptools import setup
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = ['pyconll', 'pyconll.unit'],
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
Update to not include test packages.
|
Update to not include test packages.
|
Python
|
mit
|
pyconll/pyconll,pyconll/pyconll
|
from setuptools import setup, find_packages
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = find_packages(),
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
Update to not include test packages.
|
from setuptools import setup
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = ['pyconll', 'pyconll.unit'],
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = find_packages(),
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
<commit_msg>Update to not include test packages.<commit_after>
|
from setuptools import setup
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = ['pyconll', 'pyconll.unit'],
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
from setuptools import setup, find_packages
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = find_packages(),
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
Update to not include test packages.from setuptools import setup
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = ['pyconll', 'pyconll.unit'],
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = find_packages(),
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
<commit_msg>Update to not include test packages.<commit_after>from setuptools import setup
import os
def read(fn):
"""
Read the contents of the provided filename.
Args:
fn: The filename to read in.
Returns:
The contents of the file.
"""
abs_fn = os.path.join(os.path.dirname(__file__), fn)
f = open(abs_fn)
contents = f.read()
f.close()
return contents
setup(
name = 'pyconll',
packages = ['pyconll', 'pyconll.unit'],
version = '1.0',
description = 'Read and maniuplate CoNLL files',
long_description = read('README.rst'),
author = 'Matias Grioni',
author_email = 'matgrioni@gmail.com',
url = 'https://github.com/pyconll/pyconll',
license = 'MIT',
keywords = ['nlp', 'conllu', 'conll', 'universal dependencies'],
install_requires =[
'requests >= 2.19'
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Utilities'
]
)
|
f36db59a863c3208955a3f64ccd2c98d8a450f9b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
Upgrade all dependencies to latest version.
|
Upgrade all dependencies to latest version.
|
Python
|
apache-2.0
|
cbrichford/docker-ipsec
|
from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
Upgrade all dependencies to latest version.
|
from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
<commit_before>from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
<commit_msg>Upgrade all dependencies to latest version.<commit_after>
|
from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
Upgrade all dependencies to latest version.from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
<commit_before>from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
<commit_msg>Upgrade all dependencies to latest version.<commit_after>from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
|
4121ce6f097894c666eadddcc8405b13eb6ba56a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT'
)
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Logging',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
Add PyPi trove classifiers to document Python 3 support. Add other applicable classifiers while we're here.
|
Add PyPi trove classifiers to document Python 3 support.
Add other applicable classifiers while we're here.
|
Python
|
mit
|
keeprocking/pygelf,keeprocking/pygelf
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT'
)
Add PyPi trove classifiers to document Python 3 support.
Add other applicable classifiers while we're here.
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Logging',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT'
)
<commit_msg>Add PyPi trove classifiers to document Python 3 support.
Add other applicable classifiers while we're here.<commit_after>
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Logging',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT'
)
Add PyPi trove classifiers to document Python 3 support.
Add other applicable classifiers while we're here.from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Logging',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT'
)
<commit_msg>Add PyPi trove classifiers to document Python 3 support.
Add other applicable classifiers while we're here.<commit_after>from setuptools import setup
setup(
name='pygelf',
version='0.2.8',
packages=['pygelf'],
description='Logging handlers with GELF support',
keywords='logging udp tcp ssl tls graylog2 graylog gelf',
author='Ivan Mukhin',
author_email='muhin.ivan@gmail.com',
url='https://github.com/keeprocking/pygelf',
long_description=open('README.rst').read(),
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Logging',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
c0a5d8143b87126f78e2c836f9edb5480cb6d317
|
setup.py
|
setup.py
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
|
Add dependency on Django 1.3.
|
Add dependency on Django 1.3.
|
Python
|
bsd-3-clause
|
rmaceissoft/django-photologue,jlemaes/django-photologue,seedwithroot/django-photologue-clone,rmaceissoft/django-photologue,rmaceissoft/django-photologue,MathieuDuponchelle/my_patched_photologue,RossLYoung/django-photologue,jlemaes/django-photologue,seedwithroot/django-photologue-clone,jlemaes/django-photologue,MathieuDuponchelle/my_patched_photologue,RossLYoung/django-photologue,RossLYoung/django-photologue
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Add dependency on Django 1.3.
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
|
<commit_before>#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Add dependency on Django 1.3.<commit_after>
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
|
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Add dependency on Django 1.3.#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
|
<commit_before>#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Add dependency on Django 1.3.<commit_after>#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
|
142655a4703aab92619c335e370de1da2af47ff8
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
Add whitespace to trigger travis
|
Add whitespace to trigger travis
|
Python
|
apache-2.0
|
impactlab/eemeter,openeemeter/eemeter,openeemeter/eemeter
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
Add whitespace to trigger travis
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
<commit_before>from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
<commit_msg>Add whitespace to trigger travis<commit_after>
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
Add whitespace to trigger travisfrom setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
<commit_before>from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
<commit_msg>Add whitespace to trigger travis<commit_after>from setuptools import setup, find_packages, Command
version = __import__('eemeter').get_version()
long_description = "Standard methods for calculating energy efficiency savings."
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py', '--runslow', '--cov=eemeter'])
raise SystemExit(errno)
setup(name='eemeter',
version=version,
description='Open Energy Efficiency Meter',
long_description=long_description,
url='https://github.com/impactlab/eemeter/',
author='Matt Gee, Phil Ngo, Eric Potash',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
cmdclass = {'test': PyTest},
keywords='open energy efficiency meter method methods calculation savings',
packages=find_packages(),
install_requires=['pint',
'pyyaml',
'scipy',
'numpy',
'pandas',
'requests',
'pytz'],
package_data={'': ['*.json','*.gz']},
)
|
e493ed3aef03768ba48d0bc5a149af55166e611c
|
setup.py
|
setup.py
|
# vim:fileencoding=utf-8
# Copyright (c) gocept gmbh & co. kg
# See also LICENSE.txt
import os.path
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@gocept.com',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
# vim:fileencoding=utf-8
# Copyright -2014 (c) gocept gmbh & co. kg
# Copyright 2015- (c) Flying Circus Internet Operations GmbH
# See also LICENSE.txt
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@flyingcircus.io',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
Update my contacts and the copyright.
|
Update my contacts and the copyright.
|
Python
|
lgpl-2.1
|
flyingcircusio/pycountry
|
# vim:fileencoding=utf-8
# Copyright (c) gocept gmbh & co. kg
# See also LICENSE.txt
import os.path
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@gocept.com',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
Update my contacts and the copyright.
|
# vim:fileencoding=utf-8
# Copyright -2014 (c) gocept gmbh & co. kg
# Copyright 2015- (c) Flying Circus Internet Operations GmbH
# See also LICENSE.txt
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@flyingcircus.io',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
<commit_before># vim:fileencoding=utf-8
# Copyright (c) gocept gmbh & co. kg
# See also LICENSE.txt
import os.path
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@gocept.com',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
<commit_msg>Update my contacts and the copyright.<commit_after>
|
# vim:fileencoding=utf-8
# Copyright -2014 (c) gocept gmbh & co. kg
# Copyright 2015- (c) Flying Circus Internet Operations GmbH
# See also LICENSE.txt
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@flyingcircus.io',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
# vim:fileencoding=utf-8
# Copyright (c) gocept gmbh & co. kg
# See also LICENSE.txt
import os.path
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@gocept.com',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
Update my contacts and the copyright.# vim:fileencoding=utf-8
# Copyright -2014 (c) gocept gmbh & co. kg
# Copyright 2015- (c) Flying Circus Internet Operations GmbH
# See also LICENSE.txt
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@flyingcircus.io',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
<commit_before># vim:fileencoding=utf-8
# Copyright (c) gocept gmbh & co. kg
# See also LICENSE.txt
import os.path
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@gocept.com',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
<commit_msg>Update my contacts and the copyright.<commit_after># vim:fileencoding=utf-8
# Copyright -2014 (c) gocept gmbh & co. kg
# Copyright 2015- (c) Flying Circus Internet Operations GmbH
# See also LICENSE.txt
from setuptools import setup, find_packages
setup(
name='pycountry',
version='1.12.dev0',
author='Christian Theune',
author_email='ct@flyingcircus.io',
description='ISO country, subdivision, language, currency and script '
'definitions and their translations',
long_description=(
open('README').read() + '\n' +
open('HISTORY.txt').read()),
license='LGPL 2.1',
keywords='country subdivision language currency iso 3166 639 4217 '
'15924 3166-2',
zip_safe=False,
packages=find_packages('src'),
include_package_data=True,
package_dir={'': 'src'})
|
88d405cb9ccea8b591fd282d89e1b47f13a12d7c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
url='https://github.com/remcohaszing/pywakeonlan',
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
project_urls={
'Documentation': 'http://pywakeonlan.readthedocs.io',
'GitHub': 'https://github.com/remcohaszing/pywakeonlan',
},
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
Replace url with multiple project urls
|
Replace url with multiple project urls
|
Python
|
mit
|
remcohaszing/pywakeonlan
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
url='https://github.com/remcohaszing/pywakeonlan',
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
Replace url with multiple project urls
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
project_urls={
'Documentation': 'http://pywakeonlan.readthedocs.io',
'GitHub': 'https://github.com/remcohaszing/pywakeonlan',
},
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
url='https://github.com/remcohaszing/pywakeonlan',
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
<commit_msg>Replace url with multiple project urls<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
project_urls={
'Documentation': 'http://pywakeonlan.readthedocs.io',
'GitHub': 'https://github.com/remcohaszing/pywakeonlan',
},
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
url='https://github.com/remcohaszing/pywakeonlan',
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
Replace url with multiple project urls#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
project_urls={
'Documentation': 'http://pywakeonlan.readthedocs.io',
'GitHub': 'https://github.com/remcohaszing/pywakeonlan',
},
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
url='https://github.com/remcohaszing/pywakeonlan',
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
<commit_msg>Replace url with multiple project urls<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Install the wakeonlan module.
"""
from setuptools import setup
with open('README.rst') as f:
readme = f.read()
setup(
name='wakeonlan',
description='A small python module for wake on lan.',
project_urls={
'Documentation': 'http://pywakeonlan.readthedocs.io',
'GitHub': 'https://github.com/remcohaszing/pywakeonlan',
},
author='Remco Haszing',
author_email='remcohaszing@gmail.com',
py_modules=['wakeonlan'],
license='MIT',
long_description=readme,
use_scm_version=True,
setup_requires=['setuptools-scm ~= 1.15.7'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Networking',
],
entry_points={
'console_scripts': ['wakeonlan = wakeonlan:main'],
})
|
9a935bf1a82742f223442fa3174db04fad075a6a
|
hoomd/tuner/sorter.py
|
hoomd/tuner/sorter.py
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = None
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = grid
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
Fix bug in ParticleSorter initialization
|
Fix bug in ParticleSorter initialization
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = None
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
Fix bug in ParticleSorter initialization
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = grid
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
<commit_before>from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = None
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
<commit_msg>Fix bug in ParticleSorter initialization<commit_after>
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = grid
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = None
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
Fix bug in ParticleSorter initializationfrom hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = grid
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
<commit_before>from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = None
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
<commit_msg>Fix bug in ParticleSorter initialization<commit_after>from hoomd.operation import _Tuner
from hoomd.parameterdicts import ParameterDict
from hoomd.typeconverter import OnlyType
from hoomd.trigger import Trigger
from hoomd import _hoomd
from math import log2, ceil
def to_power_of_two(value):
return int(2. ** ceil(log2(value)))
def natural_number(value):
try:
if value < 1:
raise ValueError("Expected positive integer.")
else:
return value
except TypeError:
raise ValueError("Expected positive integer.")
class ParticleSorter(_Tuner):
def __init__(self, trigger=200, grid=None):
self._param_dict = ParameterDict(
trigger=Trigger,
grid=OnlyType(int,
postprocess=lambda x: int(to_power_of_two(x)),
preprocess=natural_number,
allow_none=True)
)
self.trigger = trigger
self.grid = grid
def attach(self, simulation):
if simulation.device.mode == 'gpu':
cpp_cls = getattr(_hoomd, 'SFCPackTunerGPU')
else:
cpp_cls = getattr(_hoomd, 'SFCPackTuner')
self._cpp_obj = cpp_cls(simulation.state._cpp_sys_def, self.trigger)
super().attach(simulation)
|
e42db2036118752df609d13c6487686a07b0b6b3
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.5",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires=['six', 'matplotlib', 'numpy', 'pyfaidx'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.6",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires = ['six', 'matplotlib', 'numpy', 'simplesam'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
Remove dependency links for pybloom.
|
Remove dependency links for pybloom.
|
Python
|
mit
|
mdshw5/fastqp
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.5",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires=['six', 'matplotlib', 'numpy', 'pyfaidx'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
Remove dependency links for pybloom.
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.6",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires = ['six', 'matplotlib', 'numpy', 'simplesam'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
<commit_before>from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.5",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires=['six', 'matplotlib', 'numpy', 'pyfaidx'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
<commit_msg>Remove dependency links for pybloom.<commit_after>
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.6",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires = ['six', 'matplotlib', 'numpy', 'simplesam'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.5",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires=['six', 'matplotlib', 'numpy', 'pyfaidx'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
Remove dependency links for pybloom.from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.6",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires = ['six', 'matplotlib', 'numpy', 'simplesam'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
<commit_before>from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.5",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires=['six', 'matplotlib', 'numpy', 'pyfaidx'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
<commit_msg>Remove dependency links for pybloom.<commit_after>from setuptools import setup
setup(
name = 'fastqp',
provides = 'fastqp',
version = "0.1.6",
author = 'Matthew Shirley',
author_email = 'mdshw5@gmail.com',
url = 'http://mattshirley.com',
description = 'Simple NGS read quality assessment using Python',
license = 'MIT',
packages = ['fastqp', 'fastqp.backports'],
install_requires = ['six', 'matplotlib', 'numpy', 'simplesam'],
entry_points = { 'console_scripts': [ 'fastqp = fastqp.cli:main' ] },
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
1c8862832a5b4cbb037ee0e76cf3694bcbf52511
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
Use the same packages for installing with pip and distutils
|
Use the same packages for installing with pip and distutils
|
Python
|
mit
|
dex4er/mamba,eferro/mamba,markng/mamba,jaimegildesagredo/mamba,alejandrodob/mamba,angelsanz/mamba,nestorsalceda/mamba
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
Use the same packages for installing with pip and distutils
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
<commit_before>from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
<commit_msg>Use the same packages for installing with pip and distutils<commit_after>
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
Use the same packages for installing with pip and distutilsfrom setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
<commit_before>from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
<commit_msg>Use the same packages for installing with pip and distutils<commit_after>from setuptools import setup, find_packages
version = '0.1'
setup(name='mamba',
version=version,
description="",
long_description=open('README.md').read(),
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
266d268f4463f2ffeb6c0d79d9dd29b409c5510e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
# 'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
Remove progressbar as a requirement
|
Remove progressbar as a requirement
|
Python
|
bsd-2-clause
|
aalto-speech/morfessor,aalto-speech/flatcat
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
Remove progressbar as a requirement
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
# 'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
<commit_before>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
<commit_msg>Remove progressbar as a requirement<commit_after>
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
# 'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
Remove progressbar as a requirement#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
# 'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
<commit_before>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
<commit_msg>Remove progressbar as a requirement<commit_after>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
import re
main_py = open('morfessor.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))
requires = [
# 'progressbar',
]
setup(name='Morfessor',
version=metadata['version'],
author=metadata['author'],
author_email='morfessor@cis.hut.fi',
url='http://www.cis.hut.fi/projects/morpho/',
description='Morfessor',
py_modules=['morfessor', 'distribute_setup'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
license="BSD",
scripts=['scripts/morfessor'],
install_requires=requires,
)
|
5306d5cdbeaa6e01ebc3de765ae9684ae5d69dbb
|
setup.py
|
setup.py
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.1',
packages=['private_messages'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.2',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
Add migrations to package. Bump version.
|
Add migrations to package. Bump version.
|
Python
|
mit
|
skolsuper/pybbm_private_messages,skolsuper/pybbm_private_messages,skolsuper/pybbm_private_messages
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.1',
packages=['private_messages'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Add migrations to package. Bump version.
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.2',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.1',
packages=['private_messages'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Add migrations to package. Bump version.<commit_after>
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.2',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.1',
packages=['private_messages'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Add migrations to package. Bump version.import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.2',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.1',
packages=['private_messages'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Add migrations to package. Bump version.<commit_after>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.0.2',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='http://www.example.com/',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
fe2bd7cf8b0139e1c7c1037d89929dd7c4093458
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.2.2",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.5.3",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.8.0",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.6.0",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
Update troposphere & awacs to latest releases
|
Update troposphere & awacs to latest releases
|
Python
|
bsd-2-clause
|
mhahn/stacker,mhahn/stacker,remind101/stacker,remind101/stacker
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.2.2",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.5.3",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
Update troposphere & awacs to latest releases
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.8.0",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.6.0",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
<commit_before>import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.2.2",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.5.3",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
<commit_msg>Update troposphere & awacs to latest releases<commit_after>
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.8.0",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.6.0",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.2.2",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.5.3",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
Update troposphere & awacs to latest releasesimport os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.8.0",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.6.0",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
<commit_before>import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.2.2",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.5.3",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
<commit_msg>Update troposphere & awacs to latest releases<commit_after>import os
from setuptools import setup, find_packages
import glob
VERSION = "0.6.3"
src_dir = os.path.dirname(__file__)
install_requires = [
"troposphere>=1.8.0",
"boto3>=1.3.1",
"botocore>=1.4.38",
"PyYAML>=3.11",
"awacs>=0.6.0",
"colorama==0.3.7",
]
tests_require = [
"nose>=1.0",
"mock==1.0.1",
"stacker_blueprints",
"moto",
"testfixtures",
]
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == "__main__":
setup(
name="stacker",
version=VERSION,
author="Michael Barrett",
author_email="loki77@gmail.com",
license="New BSD license",
url="https://github.com/remind101/stacker",
description="Opinionated AWS CloudFormation Stack manager",
long_description=read("README.rst"),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, "scripts", "*")),
install_requires=install_requires,
tests_require=tests_require,
test_suite="nose.collector",
)
|
7e72c8df528c918325afe5eb31422b0f3f6b55e8
|
setup.py
|
setup.py
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
Add missing dependency on pandas
|
Add missing dependency on pandas
|
Python
|
mit
|
ddsc/ddsc-core,ddsc/ddsc-core
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
]},
)
Add missing dependency on pandas
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
<commit_before>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
]},
)
<commit_msg>Add missing dependency on pandas<commit_after>
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
]},
)
Add missing dependency on pandasfrom setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
<commit_before>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
]},
)
<commit_msg>Add missing dependency on pandas<commit_after>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'django-extensions',
'django-nose',
'lizard-ui >= 4.0b5',
'cassandralib',
'jsonfield',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='TODO@nelen-schuurmans.nl',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
f92e5d6e15ca7aa0447f8a05903212409e545bce
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml',
'cmssw/data/template.html'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
Install the plotting template, too.
|
Install the plotting template, too.
|
Python
|
mit
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
Install the plotting template, too.
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml',
'cmssw/data/template.html'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
<commit_msg>Install the plotting template, too.<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml',
'cmssw/data/template.html'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
Install the plotting template, too.#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml',
'cmssw/data/template.html'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
<commit_msg>Install the plotting template, too.<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='Lobster',
version='1.0',
description='Opportunistic HEP computing tool',
author='Anna Woodard, Matthias Wolf',
url='https://github.com/matz-e/lobster',
packages=['lobster', 'lobster.cmssw'],
package_data={'lobster': [
'cmssw/data/job.py',
'cmssw/data/wrapper.sh',
'cmssw/data/mtab',
'cmssw/data/siteconfig/JobConfig/site-local-config.xml',
'cmssw/data/siteconfig/PhEDEx/storage.xml',
'cmssw/data/template.html'
]},
install_requires=[
'argparse',
'jinja2',
'nose',
'pyyaml',
'python-daemon'
],
entry_points={
'console_scripts': ['lobster = lobster.ui:boil']
}
)
|
c9e4c7335e2753ec58713f1158fdb9e8ce0d3c06
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.2',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.3',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Add classifiers for pypi, update version number.
|
Add classifiers for pypi, update version number.
|
Python
|
mit
|
Ezibenroc/PyRoaringBitMap,Ezibenroc/PyRoaringBitMap
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.2',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
)
Add classifiers for pypi, update version number.
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.3',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.2',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
)
<commit_msg>Add classifiers for pypi, update version number.<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.3',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.2',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
)
Add classifiers for pypi, update version number.#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.3',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.2',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
)
<commit_msg>Add classifiers for pypi, update version number.<commit_after>#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyroaring',
version='0.0.3',
description='Fast and lightweight set for unsigned 32 bits integers.',
url='https://github.com/Ezibenroc/PyRoaringBitMap',
author='Tom Cornebize',
author_email='tom.cornebize@gmail.com',
license='MIT',
packages=[
'pyroaring',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
2778096c6683257d672760908f4c07b0e6a1cedc
|
setup.py
|
setup.py
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=['boto', 'dropbox'],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=[],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
Remove dropbox and boto as dependencies because they are optional and boto is not py3 compat
|
Remove dropbox and boto as dependencies because they are optional and boto is not py3 compat
|
Python
|
bsd-3-clause
|
bahoo/django-dbbackup,bahoo/django-dbbackup
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=['boto', 'dropbox'],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
Remove dropbox and boto as dependencies because they are optional and boto is not py3 compat
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=[],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
<commit_before>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=['boto', 'dropbox'],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
<commit_msg>Remove dropbox and boto as dependencies because they are optional and boto is not py3 compat<commit_after>
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=[],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=['boto', 'dropbox'],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
Remove dropbox and boto as dependencies because they are optional and boto is not py3 compatimport os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=[],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
<commit_before>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=['boto', 'dropbox'],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
<commit_msg>Remove dropbox and boto as dependencies because they are optional and boto is not py3 compat<commit_after>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
packages = []
package_dir = "dbbackup"
for dirpath, dirnames, filenames in os.walk(package_dir):
# ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith("."):
del dirnames[i]
if "__init__.py" in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
setup(
name='django-dbbackup',
version='1.9.0',
description='Management commands to help backup and restore a project database to AmazonS3, Dropbox or local disk.',
long_description=read('README.md'),
author='Michael Shepanski',
author_email='mjs7231@gmail.com',
install_requires=[],
license='BSD',
url='http://bitbucket.org/mjs7231/django-dbbackup',
keywords=['django', 'dropbox', 'database', 'backup', 'amazon', 's3'],
packages=packages
)
|
f14b9f954ac63fefeca2ad658b2ab5053fe42699
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'PyOpenWorm',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git#egg=PyOpenWorm@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
Add egg suffix for pyopenworm install dependency
|
Add egg suffix for pyopenworm install dependency
|
Python
|
mit
|
cheelee/ChannelWorm,cheelee/ChannelWorm,VahidGh/ChannelWorm,VahidGh/ChannelWorm,VahidGh/ChannelWorm,openworm/ChannelWorm,gsarma/ChannelWorm,cheelee/ChannelWorm,gsarma/ChannelWorm,gsarma/ChannelWorm,gsarma/ChannelWorm,cheelee/ChannelWorm,openworm/ChannelWorm,VahidGh/ChannelWorm,openworm/ChannelWorm,openworm/ChannelWorm
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
Add egg suffix for pyopenworm install dependency
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'PyOpenWorm',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git#egg=PyOpenWorm@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
<commit_before>from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
<commit_msg>Add egg suffix for pyopenworm install dependency<commit_after>
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'PyOpenWorm',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git#egg=PyOpenWorm@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
Add egg suffix for pyopenworm install dependencyfrom setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'PyOpenWorm',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git#egg=PyOpenWorm@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
<commit_before>from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
<commit_msg>Add egg suffix for pyopenworm install dependency<commit_after>from setuptools import setup
setup(
name='ChannelWorm',
packages=[
'channelworm',
'channelworm.ion_channel',
'channelworm.digitizer',
'channelworm.web_app',
'channelworm.fitter'
],
long_description=open('README.md').read(),
install_requires=[
'Django<=1.8',
'PyNeuroML',
'PyOpenWorm',
'cypy',
'django-formtools',
'django-sql-explorer',
'inspyred',
'neuronunit',
'neurotune',
'pillow',
'pyelectro',
'sciunit',
],
dependency_links=[
'git+https://github.com/scidash/sciunit.git#egg=sciunit',
'git+https://github.com/NeuroML/pyNeuroML.git#egg=PyNeuroML',
'git+https://github.com/pgleeson/pyelectro.git#egg=pyelectro',
'git+https://github.com/pgleeson/neurotune.git#egg=neurotune',
'git+https://github.com/openworm/PyOpenWorm.git#egg=PyOpenWorm@dev',
'git+https://github.com/rgerkin/neuronunit.git#egg=neuronunit',
]
)
|
8b66202f62e04226c46445f12c5edd5ec4b12ad0
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.0',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.1',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.1',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.2',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
Upgrade deps to latest point releases
|
Upgrade deps to latest point releases
- nose 1.3.0 to 1.3.1
- Sphinx 1.2.1 to 1.2.2
|
Python
|
mit
|
TangledWeb/tangled
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.0',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.1',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade deps to latest point releases
- nose 1.3.0 to 1.3.1
- Sphinx 1.2.1 to 1.2.2
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.1',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.2',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.0',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.1',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade deps to latest point releases
- nose 1.3.0 to 1.3.1
- Sphinx 1.2.1 to 1.2.2<commit_after>
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.1',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.2',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.0',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.1',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade deps to latest point releases
- nose 1.3.0 to 1.3.1
- Sphinx 1.2.1 to 1.2.2from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.1',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.2',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.0',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.1',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade deps to latest point releases
- nose 1.3.0 to 1.3.1
- Sphinx 1.2.1 to 1.2.2<commit_after>from setuptools import setup
setup(
name='tangled',
version='0.1a7.dev0',
description='Tangled namespace and utilities',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.scripts',
'tangled.tests',
'tangled.tests.dummy_package',
],
extras_require={
'dev': (
'coverage>=3.7.1',
'nose>=1.3.1',
'pep8>=1.4.6',
'pyflakes>=0.7.3',
'Sphinx>=1.2.2',
'sphinx_rtd_theme>=0.1.5',
)
},
entry_points="""
[console_scripts]
tangled = tangled.__main__:main
[tangled.scripts]
release = tangled.scripts:ReleaseCommand
scaffold = tangled.scripts:ScaffoldCommand
python = tangled.scripts:ShellCommand
test = tangled.scripts:TestCommand
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
bc8e548e51fddc251eb2e915883e3ee57bb9515b
|
zc_common/jwt_auth/utils.py
|
zc_common/jwt_auth/utils.py
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
# The handler from rest_framework_jwt removed user_id, so this is a fork
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
'''Constructs a payload for a user JWT. This is a slimmed down version of
https://github.com/GetBlimp/django-rest-framework-jwt/blob/master/rest_framework_jwt/utils.py#L11
:param User: an object with `pk` and `get_roles()`
:return: A dictionary that can be passed into `jwt_encode_handler`
'''
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
'''
Encodes a payload into a valid JWT.
:param payload: a dictionary
:return: an encoded JWT string
'''
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
Add docstrings to jwt handlers
|
Add docstrings to jwt handlers
|
Python
|
mit
|
ZeroCater/zc_common,ZeroCater/zc_common
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
# The handler from rest_framework_jwt removed user_id, so this is a fork
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
Add docstrings to jwt handlers
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
'''Constructs a payload for a user JWT. This is a slimmed down version of
https://github.com/GetBlimp/django-rest-framework-jwt/blob/master/rest_framework_jwt/utils.py#L11
:param User: an object with `pk` and `get_roles()`
:return: A dictionary that can be passed into `jwt_encode_handler`
'''
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
'''
Encodes a payload into a valid JWT.
:param payload: a dictionary
:return: an encoded JWT string
'''
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
<commit_before>import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
# The handler from rest_framework_jwt removed user_id, so this is a fork
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
<commit_msg>Add docstrings to jwt handlers<commit_after>
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
'''Constructs a payload for a user JWT. This is a slimmed down version of
https://github.com/GetBlimp/django-rest-framework-jwt/blob/master/rest_framework_jwt/utils.py#L11
:param User: an object with `pk` and `get_roles()`
:return: A dictionary that can be passed into `jwt_encode_handler`
'''
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
'''
Encodes a payload into a valid JWT.
:param payload: a dictionary
:return: an encoded JWT string
'''
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
# The handler from rest_framework_jwt removed user_id, so this is a fork
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
Add docstrings to jwt handlersimport jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
'''Constructs a payload for a user JWT. This is a slimmed down version of
https://github.com/GetBlimp/django-rest-framework-jwt/blob/master/rest_framework_jwt/utils.py#L11
:param User: an object with `pk` and `get_roles()`
:return: A dictionary that can be passed into `jwt_encode_handler`
'''
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
'''
Encodes a payload into a valid JWT.
:param payload: a dictionary
:return: an encoded JWT string
'''
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
<commit_before>import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
# The handler from rest_framework_jwt removed user_id, so this is a fork
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
<commit_msg>Add docstrings to jwt handlers<commit_after>import jwt
from rest_framework_jwt.settings import api_settings
def jwt_payload_handler(user):
'''Constructs a payload for a user JWT. This is a slimmed down version of
https://github.com/GetBlimp/django-rest-framework-jwt/blob/master/rest_framework_jwt/utils.py#L11
:param User: an object with `pk` and `get_roles()`
:return: A dictionary that can be passed into `jwt_encode_handler`
'''
payload = {
'id': user.pk,
'roles': user.get_roles(),
}
return payload
def jwt_encode_handler(payload):
'''
Encodes a payload into a valid JWT.
:param payload: a dictionary
:return: an encoded JWT string
'''
return jwt.encode(
payload,
api_settings.JWT_SECRET_KEY,
api_settings.JWT_ALGORITHM
).decode('utf-8')
|
b863b6ce020bb1b9a41b1cd4c81b725d47a06dd8
|
admin.py
|
admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
#date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
#date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
Python
|
mit
|
mback2k/django-app-builds
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
#date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
#date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
#date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
#date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
#date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
#date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Project, Repository, Change, Builder, Build
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class ChangeAdmin(admin.ModelAdmin):
list_display = ('project', 'repository', 'revision', 'when', 'who')
list_filter = ('project', 'repository', 'when')
search_fields = ('revision', 'comments', 'who')
#date_hierarchy = 'when'
class BuilderAdmin(admin.ModelAdmin):
list_display = ('name', 'link')
search_fields = ('name', 'link')
class BuildAdmin(admin.ModelAdmin):
filter_horizontal = ('changes',)
list_display = ('builder', 'number', 'result', 'simplified_result',
'start_time', 'end_time', 'duration')
list_filter = ('builder', 'result', 'simplified_result', 'start_time')
search_fields = ('changes__revision', 'changes__comments', 'changes__who')
#date_hierarchy = 'start_time'
admin.site.register(Project, ProjectAdmin)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Change, ChangeAdmin)
admin.site.register(Builder, BuilderAdmin)
admin.site.register(Build, BuildAdmin)
|
aeb6ce26bdde8697e7beb3d06391a04f500f574a
|
mara_db/__init__.py
|
mara_db/__init__.py
|
from mara_db import config, views, cli
MARA_CONFIG_MODULES = [config]
MARA_NAVIGATION_ENTRY_FNS = [views.navigation_entry]
MARA_ACL_RESOURCES = [views.acl_resource]
MARA_FLASK_BLUEPRINTS = [views.blueprint]
MARA_CLICK_COMMANDS = [cli.migrate]
|
"""Make the functionalities of this package auto-discoverable by mara-app"""
def MARA_CONFIG_MODULES():
from . import config
return [config]
def MARA_FLASK_BLUEPRINTS():
from . import views
return [views.blueprint]
def MARA_AUTOMIGRATE_SQLALCHEMY_MODELS():
return []
def MARA_ACL_RESOURCES():
from . import views
return {'DB Schema': views.acl_resource}
def MARA_CLICK_COMMANDS():
from . import cli
return [cli.migrate]
def MARA_NAVIGATION_ENTRIES():
from . import views
return {'DB Schema': views.navigation_entry()}
|
Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)
|
Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)
|
Python
|
mit
|
mara/mara-db,mara/mara-db
|
from mara_db import config, views, cli
MARA_CONFIG_MODULES = [config]
MARA_NAVIGATION_ENTRY_FNS = [views.navigation_entry]
MARA_ACL_RESOURCES = [views.acl_resource]
MARA_FLASK_BLUEPRINTS = [views.blueprint]
MARA_CLICK_COMMANDS = [cli.migrate]
Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)
|
"""Make the functionalities of this package auto-discoverable by mara-app"""
def MARA_CONFIG_MODULES():
from . import config
return [config]
def MARA_FLASK_BLUEPRINTS():
from . import views
return [views.blueprint]
def MARA_AUTOMIGRATE_SQLALCHEMY_MODELS():
return []
def MARA_ACL_RESOURCES():
from . import views
return {'DB Schema': views.acl_resource}
def MARA_CLICK_COMMANDS():
from . import cli
return [cli.migrate]
def MARA_NAVIGATION_ENTRIES():
from . import views
return {'DB Schema': views.navigation_entry()}
|
<commit_before>from mara_db import config, views, cli
MARA_CONFIG_MODULES = [config]
MARA_NAVIGATION_ENTRY_FNS = [views.navigation_entry]
MARA_ACL_RESOURCES = [views.acl_resource]
MARA_FLASK_BLUEPRINTS = [views.blueprint]
MARA_CLICK_COMMANDS = [cli.migrate]
<commit_msg>Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)<commit_after>
|
"""Make the functionalities of this package auto-discoverable by mara-app"""
def MARA_CONFIG_MODULES():
from . import config
return [config]
def MARA_FLASK_BLUEPRINTS():
from . import views
return [views.blueprint]
def MARA_AUTOMIGRATE_SQLALCHEMY_MODELS():
return []
def MARA_ACL_RESOURCES():
from . import views
return {'DB Schema': views.acl_resource}
def MARA_CLICK_COMMANDS():
from . import cli
return [cli.migrate]
def MARA_NAVIGATION_ENTRIES():
from . import views
return {'DB Schema': views.navigation_entry()}
|
from mara_db import config, views, cli
MARA_CONFIG_MODULES = [config]
MARA_NAVIGATION_ENTRY_FNS = [views.navigation_entry]
MARA_ACL_RESOURCES = [views.acl_resource]
MARA_FLASK_BLUEPRINTS = [views.blueprint]
MARA_CLICK_COMMANDS = [cli.migrate]
Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)"""Make the functionalities of this package auto-discoverable by mara-app"""
def MARA_CONFIG_MODULES():
from . import config
return [config]
def MARA_FLASK_BLUEPRINTS():
from . import views
return [views.blueprint]
def MARA_AUTOMIGRATE_SQLALCHEMY_MODELS():
return []
def MARA_ACL_RESOURCES():
from . import views
return {'DB Schema': views.acl_resource}
def MARA_CLICK_COMMANDS():
from . import cli
return [cli.migrate]
def MARA_NAVIGATION_ENTRIES():
from . import views
return {'DB Schema': views.navigation_entry()}
|
<commit_before>from mara_db import config, views, cli
MARA_CONFIG_MODULES = [config]
MARA_NAVIGATION_ENTRY_FNS = [views.navigation_entry]
MARA_ACL_RESOURCES = [views.acl_resource]
MARA_FLASK_BLUEPRINTS = [views.blueprint]
MARA_CLICK_COMMANDS = [cli.migrate]
<commit_msg>Change MARA_XXX variables to functions to delay importing of imports (requires updating mara-app to 2.0.0)<commit_after>"""Make the functionalities of this package auto-discoverable by mara-app"""
def MARA_CONFIG_MODULES():
from . import config
return [config]
def MARA_FLASK_BLUEPRINTS():
from . import views
return [views.blueprint]
def MARA_AUTOMIGRATE_SQLALCHEMY_MODELS():
return []
def MARA_ACL_RESOURCES():
from . import views
return {'DB Schema': views.acl_resource}
def MARA_CLICK_COMMANDS():
from . import cli
return [cli.migrate]
def MARA_NAVIGATION_ENTRIES():
from . import views
return {'DB Schema': views.navigation_entry()}
|
19a7a44449b4e08253ca9379dd23db50f27d6488
|
markdown_wrapper.py
|
markdown_wrapper.py
|
from __future__ import absolute_import
import sublime
import traceback
ST3 = int(sublime.version()) >= 3000
if ST3:
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
else:
from markdown import Markdown, util
from markdown.extensions import Extension
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
from __future__ import absolute_import
import sublime
import traceback
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
Remove some more ST2 specific code
|
Remove some more ST2 specific code
|
Python
|
mit
|
revolunet/sublimetext-markdown-preview,revolunet/sublimetext-markdown-preview
|
from __future__ import absolute_import
import sublime
import traceback
ST3 = int(sublime.version()) >= 3000
if ST3:
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
else:
from markdown import Markdown, util
from markdown.extensions import Extension
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
Remove some more ST2 specific code
|
from __future__ import absolute_import
import sublime
import traceback
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
<commit_before>from __future__ import absolute_import
import sublime
import traceback
ST3 = int(sublime.version()) >= 3000
if ST3:
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
else:
from markdown import Markdown, util
from markdown.extensions import Extension
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
<commit_msg>Remove some more ST2 specific code<commit_after>
|
from __future__ import absolute_import
import sublime
import traceback
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
from __future__ import absolute_import
import sublime
import traceback
ST3 = int(sublime.version()) >= 3000
if ST3:
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
else:
from markdown import Markdown, util
from markdown.extensions import Extension
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
Remove some more ST2 specific codefrom __future__ import absolute_import
import sublime
import traceback
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
<commit_before>from __future__ import absolute_import
import sublime
import traceback
ST3 = int(sublime.version()) >= 3000
if ST3:
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
else:
from markdown import Markdown, util
from markdown.extensions import Extension
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
<commit_msg>Remove some more ST2 specific code<commit_after>from __future__ import absolute_import
import sublime
import traceback
from markdown import Markdown, util
from markdown.extensions import Extension
import importlib
class StMarkdown(Markdown):
def __init__(self, *args, **kwargs):
Markdown.__init__(self, *args, **kwargs)
self.Meta = {}
def registerExtensions(self, extensions, configs):
"""
Register extensions with this instance of Markdown.
Keyword arguments:
* extensions: A list of extensions, which can either
be strings or objects. See the docstring on Markdown.
* configs: A dictionary mapping module names to config options.
"""
for ext in extensions:
try:
if isinstance(ext, util.string_type):
ext = self.build_extension(ext, configs.get(ext, []))
if isinstance(ext, Extension):
ext.extendMarkdown(self, globals())
elif ext is not None:
raise TypeError(
'Extension "%s.%s" must be of type: "markdown.Extension"'
% (ext.__class__.__module__, ext.__class__.__name__))
except:
print(str(traceback.format_exc()))
continue
return self
|
c1ed5befe3081f6812fc77fc694ea3e82d90f39c
|
telemetry/telemetry/core/backends/facebook_credentials_backend.py
|
telemetry/telemetry/core/backends/facebook_credentials_backend.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
@property
def credentials_type(self):
return 'facebook2'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
""" Facebook credential backend for https client. """
@property
def credentials_type(self):
return 'facebook2'
@property
def url(self):
return 'https://www.facebook.com/'
|
Set facebook_crendentials_backend_2's url to https
|
[Telemetry] Set facebook_crendentials_backend_2's url to https
TBR=tonyg@chromium.org
BUG=428098
Review URL: https://codereview.chromium.org/688113003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301945}
|
Python
|
bsd-3-clause
|
benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
@property
def credentials_type(self):
return 'facebook2'
[Telemetry] Set facebook_crendentials_backend_2's url to https
TBR=tonyg@chromium.org
BUG=428098
Review URL: https://codereview.chromium.org/688113003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301945}
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
""" Facebook credential backend for https client. """
@property
def credentials_type(self):
return 'facebook2'
@property
def url(self):
return 'https://www.facebook.com/'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
@property
def credentials_type(self):
return 'facebook2'
<commit_msg>[Telemetry] Set facebook_crendentials_backend_2's url to https
TBR=tonyg@chromium.org
BUG=428098
Review URL: https://codereview.chromium.org/688113003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301945}<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
""" Facebook credential backend for https client. """
@property
def credentials_type(self):
return 'facebook2'
@property
def url(self):
return 'https://www.facebook.com/'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
@property
def credentials_type(self):
return 'facebook2'
[Telemetry] Set facebook_crendentials_backend_2's url to https
TBR=tonyg@chromium.org
BUG=428098
Review URL: https://codereview.chromium.org/688113003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301945}# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
""" Facebook credential backend for https client. """
@property
def credentials_type(self):
return 'facebook2'
@property
def url(self):
return 'https://www.facebook.com/'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
@property
def credentials_type(self):
return 'facebook2'
<commit_msg>[Telemetry] Set facebook_crendentials_backend_2's url to https
TBR=tonyg@chromium.org
BUG=428098
Review URL: https://codereview.chromium.org/688113003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301945}<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import form_based_credentials_backend
class FacebookCredentialsBackend(
form_based_credentials_backend.FormBasedCredentialsBackend):
@property
def logged_in_javascript(self):
"""Evaluates to true iff already logged in."""
return ('document.getElementById("fbNotificationsList")!== null || '
'document.getElementById("m_home_notice")!== null')
@property
def credentials_type(self):
return 'facebook'
@property
def url(self):
return 'http://www.facebook.com/'
@property
def login_form_id(self):
return 'login_form'
@property
def login_input_id(self):
return 'email'
@property
def password_input_id(self):
return 'pass'
class FacebookCredentialsBackend2(FacebookCredentialsBackend):
""" Facebook credential backend for https client. """
@property
def credentials_type(self):
return 'facebook2'
@property
def url(self):
return 'https://www.facebook.com/'
|
8360bebbd4bf2b2e9d51c7aa16bdb9506a91883e
|
tests/chainer_tests/training_tests/extensions_tests/test_snapshot.py
|
tests/chainer_tests/training_tests/extensions_tests/test_snapshot.py
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
from chainer.training import trigger
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
Add unit test to pass Trigger instance.
|
Add unit test to pass Trigger instance.
|
Python
|
mit
|
okuta/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,cupy/cupy,kiyukuta/chainer,rezoo/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,hvy/chainer,tkerola/chainer,niboshi/chainer,cupy/cupy,niboshi/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,wkentaro/chainer,jnishi/chainer,cupy/cupy,cupy/cupy,chainer/chainer,hvy/chainer,jnishi/chainer,chainer/chainer,wkentaro/chainer,keisuke-umezawa/chainer,chainer/chainer,kashif/chainer,jnishi/chainer,ktnyt/chainer,okuta/chainer,niboshi/chainer,ysekky/chainer,wkentaro/chainer,delta2323/chainer,ktnyt/chainer,keisuke-umezawa/chainer,chainer/chainer,hvy/chainer,niboshi/chainer,aonotas/chainer,anaruse/chainer,ronekko/chainer,pfnet/chainer
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
Add unit test to pass Trigger instance.
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
from chainer.training import trigger
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
<commit_before>import unittest
import mock
from chainer import testing
from chainer.training import extensions
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
<commit_msg>Add unit test to pass Trigger instance.<commit_after>
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
from chainer.training import trigger
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
import unittest
import mock
from chainer import testing
from chainer.training import extensions
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
Add unit test to pass Trigger instance.import unittest
import mock
from chainer import testing
from chainer.training import extensions
from chainer.training import trigger
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
<commit_before>import unittest
import mock
from chainer import testing
from chainer.training import extensions
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
<commit_msg>Add unit test to pass Trigger instance.<commit_after>import unittest
import mock
from chainer import testing
from chainer.training import extensions
from chainer.training import trigger
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshotObject(unittest.TestCase):
def test_trigger(self):
target = mock.MagicMock()
snapshot_object = extensions.snapshot_object(target, 'myfile.dat',
trigger=self.trigger)
self.assertEqual(snapshot_object.trigger, self.trigger)
@testing.parameterize(
{'trigger': ('epoch', 2)},
{'trigger': ('iteration', 10)},
{'trigger': trigger.IntervalTrigger(5, 'epoch')},
{'trigger': trigger.IntervalTrigger(20, 'iteration')},
)
class TestSnapshot(unittest.TestCase):
def test_trigger(self):
snapshot = extensions.snapshot(trigger=self.trigger)
self.assertEqual(snapshot.trigger, self.trigger)
testing.run_module(__name__, __file__)
|
04939189efdc55164af8dc04223c7733664f091f
|
valohai_cli/cli_utils.py
|
valohai_cli/cli_utils.py
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
return retval
click.secho('Sorry, try again.')
continue
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
if retval:
return retval
click.secho('Sorry, try again.')
continue
|
Fix `prompt_from_list` misbehaving with nonlist_validator
|
Fix `prompt_from_list` misbehaving with nonlist_validator
|
Python
|
mit
|
valohai/valohai-cli
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
return retval
click.secho('Sorry, try again.')
continue
Fix `prompt_from_list` misbehaving with nonlist_validator
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
if retval:
return retval
click.secho('Sorry, try again.')
continue
|
<commit_before>import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
return retval
click.secho('Sorry, try again.')
continue
<commit_msg>Fix `prompt_from_list` misbehaving with nonlist_validator<commit_after>
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
if retval:
return retval
click.secho('Sorry, try again.')
continue
|
import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
return retval
click.secho('Sorry, try again.')
continue
Fix `prompt_from_list` misbehaving with nonlist_validatorimport click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
if retval:
return retval
click.secho('Sorry, try again.')
continue
|
<commit_before>import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
return retval
click.secho('Sorry, try again.')
continue
<commit_msg>Fix `prompt_from_list` misbehaving with nonlist_validator<commit_after>import click
def prompt_from_list(options, prompt, nonlist_validator=None):
for i, option in enumerate(options, 1):
click.echo('{number} {name} {description}'.format(
number=click.style('[%3d]' % i, fg='cyan'),
name=option['name'],
description=(
click.style('(%s)' % option['description'], dim=True)
if option.get('description')
else ''
),
))
while True:
answer = click.prompt(prompt)
if answer.isdigit() and (1 <= int(answer) <= len(options)):
return options[int(answer) - 1]
if nonlist_validator:
retval = nonlist_validator(answer)
if retval:
return retval
click.secho('Sorry, try again.')
continue
|
ee2187a4cb52acbedf89c3381459b33297371f6e
|
core/api/views/endpoints.py
|
core/api/views/endpoints.py
|
from flask import Module, jsonify
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class TestModelAPI(MethodView):
@jsonp
def get(self, id=None):
if id:
return jsonify(
code=200,
value=0
)
else:
return jsonify(
code=200,
value=1
)
TestModel_view = TestModelAPI.as_view('test_model_api')
api.add_url_rule(
'/test',
view_func=TestModel_view,
methods=['GET']
)
api.add_url_rule(
'/test/<string:id>',
view_func=TestModel_view,
methods=['GET']
)
|
from flask import Module, jsonify, request
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class CreateUnikernel(MethodView):
@jsonp
def get(self):
return jsonify_status_code(
code=405,
message='HTTP method GET is not allowed for this URL'
)
@jsonp
def post(self):
content = request.get_json(force=False, silent=True)
if not content:
return jsonify_status_code(
code=400,
message='Bad HTTP POST request'
)
else:
# Validate JSON
pass
CreateUnikernel_view = CreateUnikernel.as_view('create_unikernel')
api.add_url_rule(
'/unikernel/create',
view_func=CreateUnikernel_view,
methods=['GET', 'POST']
)
|
Add new Flask MethodView called CreateUnikernel
|
Add new Flask MethodView called CreateUnikernel
|
Python
|
apache-2.0
|
adyasha/dune,onyb/dune,adyasha/dune,adyasha/dune
|
from flask import Module, jsonify
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class TestModelAPI(MethodView):
@jsonp
def get(self, id=None):
if id:
return jsonify(
code=200,
value=0
)
else:
return jsonify(
code=200,
value=1
)
TestModel_view = TestModelAPI.as_view('test_model_api')
api.add_url_rule(
'/test',
view_func=TestModel_view,
methods=['GET']
)
api.add_url_rule(
'/test/<string:id>',
view_func=TestModel_view,
methods=['GET']
)
Add new Flask MethodView called CreateUnikernel
|
from flask import Module, jsonify, request
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class CreateUnikernel(MethodView):
@jsonp
def get(self):
return jsonify_status_code(
code=405,
message='HTTP method GET is not allowed for this URL'
)
@jsonp
def post(self):
content = request.get_json(force=False, silent=True)
if not content:
return jsonify_status_code(
code=400,
message='Bad HTTP POST request'
)
else:
# Validate JSON
pass
CreateUnikernel_view = CreateUnikernel.as_view('create_unikernel')
api.add_url_rule(
'/unikernel/create',
view_func=CreateUnikernel_view,
methods=['GET', 'POST']
)
|
<commit_before>from flask import Module, jsonify
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class TestModelAPI(MethodView):
@jsonp
def get(self, id=None):
if id:
return jsonify(
code=200,
value=0
)
else:
return jsonify(
code=200,
value=1
)
TestModel_view = TestModelAPI.as_view('test_model_api')
api.add_url_rule(
'/test',
view_func=TestModel_view,
methods=['GET']
)
api.add_url_rule(
'/test/<string:id>',
view_func=TestModel_view,
methods=['GET']
)
<commit_msg>Add new Flask MethodView called CreateUnikernel<commit_after>
|
from flask import Module, jsonify, request
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class CreateUnikernel(MethodView):
@jsonp
def get(self):
return jsonify_status_code(
code=405,
message='HTTP method GET is not allowed for this URL'
)
@jsonp
def post(self):
content = request.get_json(force=False, silent=True)
if not content:
return jsonify_status_code(
code=400,
message='Bad HTTP POST request'
)
else:
# Validate JSON
pass
CreateUnikernel_view = CreateUnikernel.as_view('create_unikernel')
api.add_url_rule(
'/unikernel/create',
view_func=CreateUnikernel_view,
methods=['GET', 'POST']
)
|
from flask import Module, jsonify
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class TestModelAPI(MethodView):
@jsonp
def get(self, id=None):
if id:
return jsonify(
code=200,
value=0
)
else:
return jsonify(
code=200,
value=1
)
TestModel_view = TestModelAPI.as_view('test_model_api')
api.add_url_rule(
'/test',
view_func=TestModel_view,
methods=['GET']
)
api.add_url_rule(
'/test/<string:id>',
view_func=TestModel_view,
methods=['GET']
)
Add new Flask MethodView called CreateUnikernelfrom flask import Module, jsonify, request
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class CreateUnikernel(MethodView):
@jsonp
def get(self):
return jsonify_status_code(
code=405,
message='HTTP method GET is not allowed for this URL'
)
@jsonp
def post(self):
content = request.get_json(force=False, silent=True)
if not content:
return jsonify_status_code(
code=400,
message='Bad HTTP POST request'
)
else:
# Validate JSON
pass
CreateUnikernel_view = CreateUnikernel.as_view('create_unikernel')
api.add_url_rule(
'/unikernel/create',
view_func=CreateUnikernel_view,
methods=['GET', 'POST']
)
|
<commit_before>from flask import Module, jsonify
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class TestModelAPI(MethodView):
@jsonp
def get(self, id=None):
if id:
return jsonify(
code=200,
value=0
)
else:
return jsonify(
code=200,
value=1
)
TestModel_view = TestModelAPI.as_view('test_model_api')
api.add_url_rule(
'/test',
view_func=TestModel_view,
methods=['GET']
)
api.add_url_rule(
'/test/<string:id>',
view_func=TestModel_view,
methods=['GET']
)
<commit_msg>Add new Flask MethodView called CreateUnikernel<commit_after>from flask import Module, jsonify, request
from flask.views import MethodView
from core.api.decorators import jsonp
api = Module(
__name__,
url_prefix='/api'
)
def jsonify_status_code(*args, **kw):
response = jsonify(*args, **kw)
response.status_code = kw['code']
return response
@api.route('/')
def index():
"""
The root of the API returns an error
"""
return jsonify_status_code(
code=400,
message='Room no 404: File not found'
)
class CreateUnikernel(MethodView):
@jsonp
def get(self):
return jsonify_status_code(
code=405,
message='HTTP method GET is not allowed for this URL'
)
@jsonp
def post(self):
content = request.get_json(force=False, silent=True)
if not content:
return jsonify_status_code(
code=400,
message='Bad HTTP POST request'
)
else:
# Validate JSON
pass
CreateUnikernel_view = CreateUnikernel.as_view('create_unikernel')
api.add_url_rule(
'/unikernel/create',
view_func=CreateUnikernel_view,
methods=['GET', 'POST']
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.