commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a62d038885dcf0b97c544f3b091f2bfba7cc23d7
|
kitsune/sumo/widgets.py
|
kitsune/sumo/widgets.py
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
Add required renderer argument to Widget.render() call
|
Add required renderer argument to Widget.render() call
mozilla/sumo-project#136
|
Python
|
bsd-3-clause
|
mozilla/kitsune,mozilla/kitsune,mozilla/kitsune,mozilla/kitsune
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
Add required renderer argument to Widget.render() call
mozilla/sumo-project#136
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
<commit_before># Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
<commit_msg>Add required renderer argument to Widget.render() call
mozilla/sumo-project#136<commit_after>
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
Add required renderer argument to Widget.render() call
mozilla/sumo-project#136# Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
<commit_before># Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
<commit_msg>Add required renderer argument to Widget.render() call
mozilla/sumo-project#136<commit_after># Based on http://djangosnippets.org/snippets/1580/
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
00bcddfaf4d64ac76256a50f13fbdcb9dc7a58bd
|
projects/urls.py
|
projects/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add-project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
Change url namespace underscore with -
|
Change url namespace underscore with -
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
Change url namespace underscore with -
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add-project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
<commit_msg>Change url namespace underscore with -<commit_after>
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add-project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
Change url namespace underscore with -from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add-project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
<commit_msg>Change url namespace underscore with -<commit_after>from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add-project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit-project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit-status'),
url(r'^archive/$', 'projects_archive', name='projects-archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
c8cfce2cd4820d937d10dced4472055921342582
|
cyder/core/ctnr/forms.py
|
cyder/core/ctnr/forms.py
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users',)
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users', 'domains', 'ranges', 'workgroups')
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
Remove m2m fields from ctnr edit form
|
Remove m2m fields from ctnr edit form
|
Python
|
bsd-3-clause
|
akeym/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,OSU-Net/cyder,OSU-Net/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,murrown/cyder,murrown/cyder,drkitty/cyder,akeym/cyder
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users',)
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
Remove m2m fields from ctnr edit form
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users', 'domains', 'ranges', 'workgroups')
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
<commit_before>from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users',)
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
<commit_msg>Remove m2m fields from ctnr edit form<commit_after>
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users', 'domains', 'ranges', 'workgroups')
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users',)
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
Remove m2m fields from ctnr edit formfrom django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users', 'domains', 'ranges', 'workgroups')
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
<commit_before>from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users',)
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
<commit_msg>Remove m2m fields from ctnr edit form<commit_after>from django import forms
from cyder.base.constants import LEVELS
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.ctnr.models import Ctnr
class CtnrForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
model = Ctnr
exclude = ('users', 'domains', 'ranges', 'workgroups')
def filter_by_ctnr_all(self, ctnr):
pass
class CtnrUserForm(forms.Form):
level = forms.ChoiceField(widget=forms.RadioSelect,
label="Level*",
choices=[item for item in LEVELS.items()])
class CtnrObjectForm(forms.Form):
obj_type = forms.ChoiceField(
widget=forms.RadioSelect,
label='Type*',
choices=(
('user', 'User'),
('domain', 'Domain'),
('range', 'Range'),
('workgroup', 'Workgroup')))
def __init__(self, *args, **kwargs):
obj_perm = kwargs.pop('obj_perm', False)
super(CtnrObjectForm, self).__init__(*args, **kwargs)
if not obj_perm:
self.fields['obj_type'].choices = (('user', 'User'),)
obj = forms.CharField(
widget=forms.TextInput(attrs={'id': 'object-searchbox'}),
label='Search*')
|
dd4015874d6e7ab377795177876fe46a934bf741
|
testinfra/mon/test_ossec_ruleset.py
|
testinfra/mon/test_ossec_ruleset.py
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
def test_overloaded_tor_guard_does_not_produce_alert(Command, Sudo):
"""Check that using an overloaded guard does not produce an OSSEC alert"""
test_alert = ("Aug 16 21:54:44 app-staging Tor[26695]: [warn] Your Guard "
"<name> (<fingerprint>) is failing a very large amount of "
"circuits. Most likely this means the Tor network is "
"overloaded, but it could also mean an attack against you "
"or potentially the guard itself.")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
assert "Alert to be generated" not in c.stderr
|
Add test to reproduce overloaded Tor guard OSSEC alert
|
Add test to reproduce overloaded Tor guard OSSEC alert
A Tor log event indicating that a Tor guard in use is overloaded
currently produces an OSSEC alert. While this alert is an
excellent candidate to be sent upstream to FPF for analysis,
there is no action that a SecureDrop administrator is expected
to take, making this a spurious OSSEC alert.
This test reproduces this spurious alert and is a regression test
for an OSSEC rule patch.
|
Python
|
agpl-3.0
|
garrettr/securedrop,ehartsuyker/securedrop,micahflee/securedrop,heartsucker/securedrop,conorsch/securedrop,garrettr/securedrop,micahflee/securedrop,conorsch/securedrop,ehartsuyker/securedrop,micahflee/securedrop,conorsch/securedrop,conorsch/securedrop,micahflee/securedrop,heartsucker/securedrop,conorsch/securedrop,garrettr/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,garrettr/securedrop
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
Add test to reproduce overloaded Tor guard OSSEC alert
A Tor log event indicating that a Tor guard in use is overloaded
currently produces an OSSEC alert. While this alert is an
excellent candidate to be sent upstream to FPF for analysis,
there is no action that a SecureDrop administrator is expected
to take, making this a spurious OSSEC alert.
This test reproduces this spurious alert and is a regression test
for an OSSEC rule patch.
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
def test_overloaded_tor_guard_does_not_produce_alert(Command, Sudo):
"""Check that using an overloaded guard does not produce an OSSEC alert"""
test_alert = ("Aug 16 21:54:44 app-staging Tor[26695]: [warn] Your Guard "
"<name> (<fingerprint>) is failing a very large amount of "
"circuits. Most likely this means the Tor network is "
"overloaded, but it could also mean an attack against you "
"or potentially the guard itself.")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
assert "Alert to be generated" not in c.stderr
|
<commit_before>import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
<commit_msg>Add test to reproduce overloaded Tor guard OSSEC alert
A Tor log event indicating that a Tor guard in use is overloaded
currently produces an OSSEC alert. While this alert is an
excellent candidate to be sent upstream to FPF for analysis,
there is no action that a SecureDrop administrator is expected
to take, making this a spurious OSSEC alert.
This test reproduces this spurious alert and is a regression test
for an OSSEC rule patch.<commit_after>
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
def test_overloaded_tor_guard_does_not_produce_alert(Command, Sudo):
"""Check that using an overloaded guard does not produce an OSSEC alert"""
test_alert = ("Aug 16 21:54:44 app-staging Tor[26695]: [warn] Your Guard "
"<name> (<fingerprint>) is failing a very large amount of "
"circuits. Most likely this means the Tor network is "
"overloaded, but it could also mean an attack against you "
"or potentially the guard itself.")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
assert "Alert to be generated" not in c.stderr
|
import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
Add test to reproduce overloaded Tor guard OSSEC alert
A Tor log event indicating that a Tor guard in use is overloaded
currently produces an OSSEC alert. While this alert is an
excellent candidate to be sent upstream to FPF for analysis,
there is no action that a SecureDrop administrator is expected
to take, making this a spurious OSSEC alert.
This test reproduces this spurious alert and is a regression test
for an OSSEC rule patch.import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
def test_overloaded_tor_guard_does_not_produce_alert(Command, Sudo):
"""Check that using an overloaded guard does not produce an OSSEC alert"""
test_alert = ("Aug 16 21:54:44 app-staging Tor[26695]: [warn] Your Guard "
"<name> (<fingerprint>) is failing a very large amount of "
"circuits. Most likely this means the Tor network is "
"overloaded, but it could also mean an attack against you "
"or potentially the guard itself.")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
assert "Alert to be generated" not in c.stderr
|
<commit_before>import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
<commit_msg>Add test to reproduce overloaded Tor guard OSSEC alert
A Tor log event indicating that a Tor guard in use is overloaded
currently produces an OSSEC alert. While this alert is an
excellent candidate to be sent upstream to FPF for analysis,
there is no action that a SecureDrop administrator is expected
to take, making this a spurious OSSEC alert.
This test reproduces this spurious alert and is a regression test
for an OSSEC rule patch.<commit_after>import re
alert_level_regex = re.compile(r"Level: '(\d+)'")
def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
"""Check that a denied RWX mmaping produces an OSSEC alert"""
test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
"RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
"[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
"/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
# Level 7 alert should be triggered by rule 100101
assert "Alert to be generated" in c.stderr
alert_level = alert_level_regex.findall(c.stderr)[0]
assert alert_level == "7"
def test_overloaded_tor_guard_does_not_produce_alert(Command, Sudo):
"""Check that using an overloaded guard does not produce an OSSEC alert"""
test_alert = ("Aug 16 21:54:44 app-staging Tor[26695]: [warn] Your Guard "
"<name> (<fingerprint>) is failing a very large amount of "
"circuits. Most likely this means the Tor network is "
"overloaded, but it could also mean an attack against you "
"or potentially the guard itself.")
with Sudo():
c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
test_alert))
assert "Alert to be generated" not in c.stderr
|
50621ef5b141470879a786088391a516b4f63d52
|
note/models.py
|
note/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
Migrate to a custom User class.
|
Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.
|
Python
|
bsd-2-clause
|
LeMeteore/boomer2
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.
|
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
<commit_msg>Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.<commit_after>
|
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
<commit_msg>Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.<commit_after>from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
3b28a1fa47d4e2339f2219eaf688b88b5901afea
|
migrations/versions/0074_update_sms_rate.py
|
migrations/versions/0074_update_sms_rate.py
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0072_add_dvla_orgs
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0073_add_international_sms_flag
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
Fix db migration merge conflicts
|
Fix db migration merge conflicts
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0072_add_dvla_orgs
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")Fix db migration merge conflicts
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0073_add_international_sms_flag
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
<commit_before>"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0072_add_dvla_orgs
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")<commit_msg>Fix db migration merge conflicts<commit_after>
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0073_add_international_sms_flag
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0072_add_dvla_orgs
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")Fix db migration merge conflicts"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0073_add_international_sms_flag
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
<commit_before>"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0072_add_dvla_orgs
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")<commit_msg>Fix db migration merge conflicts<commit_after>"""empty message
Revision ID: 0074_update_sms_rate
Revises: 0073_add_international_sms_flag
Create Date: 2017-04-24 12:10:02.116278
"""
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
|
529ab85ac8a25b05690f507ed67ba767d4fb53db
|
pyEchosign/utils/handle_response.py
|
pyEchosign/utils/handle_response.py
|
from requests import Response
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
response_json = response.json()
code = response_json.get('code', None)
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
def response_success(response: Response):
return 199 < response.status_code < 300
|
from requests import Response
from exceptions.internal_exceptions import ApiError
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
elif not response_success(response):
try:
json_response = response.json()
except ValueError:
json_response = ''
raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following '
f'JSON: "{json_response}" and content: "{response.content}""')
def response_success(response: Response):
return 199 < response.status_code < 300
|
Check for json() ValueError with requests when raising an ApiError in check_error()
|
Check for json() ValueError with requests when raising an ApiError in check_error()
|
Python
|
mit
|
JensAstrup/pyEchosign
|
from requests import Response
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
response_json = response.json()
code = response_json.get('code', None)
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
def response_success(response: Response):
return 199 < response.status_code < 300
Check for json() ValueError with requests when raising an ApiError in check_error()
|
from requests import Response
from exceptions.internal_exceptions import ApiError
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
elif not response_success(response):
try:
json_response = response.json()
except ValueError:
json_response = ''
raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following '
f'JSON: "{json_response}" and content: "{response.content}""')
def response_success(response: Response):
return 199 < response.status_code < 300
|
<commit_before>from requests import Response
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
response_json = response.json()
code = response_json.get('code', None)
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
def response_success(response: Response):
return 199 < response.status_code < 300
<commit_msg>Check for json() ValueError with requests when raising an ApiError in check_error()<commit_after>
|
from requests import Response
from exceptions.internal_exceptions import ApiError
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
elif not response_success(response):
try:
json_response = response.json()
except ValueError:
json_response = ''
raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following '
f'JSON: "{json_response}" and content: "{response.content}""')
def response_success(response: Response):
return 199 < response.status_code < 300
|
from requests import Response
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
response_json = response.json()
code = response_json.get('code', None)
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
def response_success(response: Response):
return 199 < response.status_code < 300
Check for json() ValueError with requests when raising an ApiError in check_error()from requests import Response
from exceptions.internal_exceptions import ApiError
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
elif not response_success(response):
try:
json_response = response.json()
except ValueError:
json_response = ''
raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following '
f'JSON: "{json_response}" and content: "{response.content}""')
def response_success(response: Response):
return 199 < response.status_code < 300
|
<commit_before>from requests import Response
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
response_json = response.json()
code = response_json.get('code', None)
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
def response_success(response: Response):
return 199 < response.status_code < 300
<commit_msg>Check for json() ValueError with requests when raising an ApiError in check_error()<commit_after>from requests import Response
from exceptions.internal_exceptions import ApiError
def check_error(response: Response):
""" Takes a requests package response object and checks the error code and raises the proper exception """
if response.status_code == 401:
raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your '
'account should have access to perform this action.')
elif not response_success(response):
try:
json_response = response.json()
except ValueError:
json_response = ''
raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following '
f'JSON: "{json_response}" and content: "{response.content}""')
def response_success(response: Response):
return 199 < response.status_code < 300
|
42e4e5e78779b4c683fc42fb4c45bb600e96afe3
|
probe/controllers/braintasks.py
|
probe/controllers/braintasks.py
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.scan_tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
Fix new name for scan tasks
|
Fix new name for scan tasks
|
Python
|
apache-2.0
|
hirokihamasaki/irma,hirokihamasaki/irma,hirokihamasaki/irma,hirokihamasaki/irma,quarkslab/irma,hirokihamasaki/irma,quarkslab/irma,quarkslab/irma,quarkslab/irma
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
Fix new name for scan tasks
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.scan_tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
<commit_before># Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
<commit_msg>Fix new name for scan tasks<commit_after>
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.scan_tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
Fix new name for scan tasks# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.scan_tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
<commit_before># Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
<commit_msg>Fix new name for scan tasks<commit_after># Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import celery
import config.parser as config
from probe.helpers.celerytasks import async_call
# declare a new Remote Brain application
brain_app = celery.Celery('braintasks')
config.conf_brain_celery(brain_app)
config.configure_syslog(brain_app)
# ============
# Task calls
# ============
def register_probe(name, display_name, category, mimetype_regexp):
""" send a task to the brain to register local probes"""
task = async_call(brain_app, "brain.scan_tasks", "register_probe",
args=[name, display_name, category, mimetype_regexp])
return task
|
b9cf2f0daf2ca360c64d1268d50cab9c07020222
|
test_engine.py
|
test_engine.py
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
def test_is_coord_on_board():
for coord in VALID_COORDS:
assert engine._is_coord_on_board(coord) is True
for coord in INVALID_COORDS:
assert engine._is_coord_on_board(coord) is False
|
Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwise
|
Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwise
|
Python
|
mit
|
EyuelAbebe/gamer,EyuelAbebe/gamer
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwise
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
def test_is_coord_on_board():
for coord in VALID_COORDS:
assert engine._is_coord_on_board(coord) is True
for coord in INVALID_COORDS:
assert engine._is_coord_on_board(coord) is False
|
<commit_before>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
<commit_msg>Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwise<commit_after>
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
def test_is_coord_on_board():
for coord in VALID_COORDS:
assert engine._is_coord_on_board(coord) is True
for coord in INVALID_COORDS:
assert engine._is_coord_on_board(coord) is False
|
import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwiseimport engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
def test_is_coord_on_board():
for coord in VALID_COORDS:
assert engine._is_coord_on_board(coord) is True
for coord in INVALID_COORDS:
assert engine._is_coord_on_board(coord) is False
|
<commit_before>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
<commit_msg>Add test_is_coord_on_board() to assert the function returns True if the coordinate is on the board and False otherwise<commit_after>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
def test_is_coord_on_board():
for coord in VALID_COORDS:
assert engine._is_coord_on_board(coord) is True
for coord in INVALID_COORDS:
assert engine._is_coord_on_board(coord) is False
|
0cecbabd2d594bfc2ca57e522658d13eda2bc6a8
|
pipdiff/pipdiff.py
|
pipdiff/pipdiff.py
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
from xmlrpclib import ServerProxy
pypi = ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if version > distribution.parsed_version:
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print "{0:40} {1}".format(local, remote)
return True
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
try:
from xmlrpclib import ServerProxy
except ImportError:
import xmlrpc.client
try:
pypi = ServerProxy("http://pypi.python.org/pypi")
except NameError:
pypi = xmlrpc.client.ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if str(version) > str(distribution.parsed_version):
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print("{0:40} {1}".format(local, remote))
return True
if __name__ == '__main__':
main()
|
Add support for Python 3
|
Add support for Python 3
|
Python
|
bsd-3-clause
|
ogt/pipdiff
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
from xmlrpclib import ServerProxy
pypi = ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if version > distribution.parsed_version:
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print "{0:40} {1}".format(local, remote)
return True
if __name__ == '__main__':
main()
Add support for Python 3
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
try:
from xmlrpclib import ServerProxy
except ImportError:
import xmlrpc.client
try:
pypi = ServerProxy("http://pypi.python.org/pypi")
except NameError:
pypi = xmlrpc.client.ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if str(version) > str(distribution.parsed_version):
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print("{0:40} {1}".format(local, remote))
return True
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
from xmlrpclib import ServerProxy
pypi = ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if version > distribution.parsed_version:
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print "{0:40} {1}".format(local, remote)
return True
if __name__ == '__main__':
main()
<commit_msg>Add support for Python 3<commit_after>
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
try:
from xmlrpclib import ServerProxy
except ImportError:
import xmlrpc.client
try:
pypi = ServerProxy("http://pypi.python.org/pypi")
except NameError:
pypi = xmlrpc.client.ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if str(version) > str(distribution.parsed_version):
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print("{0:40} {1}".format(local, remote))
return True
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
from xmlrpclib import ServerProxy
pypi = ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if version > distribution.parsed_version:
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print "{0:40} {1}".format(local, remote)
return True
if __name__ == '__main__':
main()
Add support for Python 3#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
try:
from xmlrpclib import ServerProxy
except ImportError:
import xmlrpc.client
try:
pypi = ServerProxy("http://pypi.python.org/pypi")
except NameError:
pypi = xmlrpc.client.ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if str(version) > str(distribution.parsed_version):
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print("{0:40} {1}".format(local, remote))
return True
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
from xmlrpclib import ServerProxy
pypi = ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if version > distribution.parsed_version:
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print "{0:40} {1}".format(local, remote)
return True
if __name__ == '__main__':
main()
<commit_msg>Add support for Python 3<commit_after>#!/usr/bin/env python
# Original author : Jonathan Zempel, https://github.com/jzempel
# Copied from https://gist.github.com/jzempel/4624227
# Copied here for the purpose of adding it to PyPI
from pkg_resources import parse_version
try:
from xmlrpclib import ServerProxy
except ImportError:
import xmlrpc.client
try:
pypi = ServerProxy("http://pypi.python.org/pypi")
except NameError:
pypi = xmlrpc.client.ServerProxy("http://pypi.python.org/pypi")
def main():
try:
from pip import get_installed_distributions
except ImportError:
from sys import exit
exit("pip not available")
for distribution in sorted(get_installed_distributions(),
key=lambda distribution: distribution.project_name):
remote = ''
project_name = distribution.project_name
releases = pypi.package_releases(project_name)
if not releases:
pypi.package_releases(project_name.capitalize())
if releases:
version = parse_version(releases[0])
if str(version) > str(distribution.parsed_version):
remote = "PyPI:{0}=={1}".format(project_name, releases[0])
else:
remote = "PyPI:{0} not found".format(project_name)
local = "{0}=={1}".format(project_name, distribution.version)
print("{0:40} {1}".format(local, remote))
return True
if __name__ == '__main__':
main()
|
8f4c5b6a4c609e5154dfee432c567e382f69ee88
|
src/geoserver/layer.py
|
src/geoserver/layer.py
|
from urllib2 import HTTPError
from geoserver.support import atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer:
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
try:
layer = get_xml(self.href)
self.name = layer.find("name").text
self.attribution = layer.find("attribution").text
self.enabled = layer.find("enabled").text == "true"
self.default_style = Style(layer.find("defaultStyle"))
resource = layer.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
except HTTPError, e:
print e.geturl()
def __repr__(self):
return "Layer[%s]" % self.name
|
from urllib2 import HTTPError
from geoserver.support import ResourceInfo, atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer(ResourceInfo):
resource_type = "layers"
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
name = self.metadata.find("name")
attribution = self.metadata.find("attribution")
enabled = self.metadata.find("enabled")
default_style = self.metadata.find("defaultStyle")
if name is not None:
self.name = name.text
else:
self.name = None
if attribution is not None:
self.attribution = attribution.text
else:
self.attribution = None
if enabled is not None and enabled.text == "true":
self.enabled = True
else:
self.enabled = False
if default_style is not None:
self.default_style = Style(default_style)
else:
self.default_style = None
resource = self.metadata.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
def __repr__(self):
return "Layer[%s]" % self.name
|
Update Layer to use ResourceInfo support class
|
Update Layer to use ResourceInfo support class
|
Python
|
mit
|
boundlessgeo/gsconfig,garnertb/gsconfig.py,Geode/gsconfig,cristianzamar/gsconfig,scottp-dpaw/gsconfig,afabiani/gsconfig
|
from urllib2 import HTTPError
from geoserver.support import atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer:
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
try:
layer = get_xml(self.href)
self.name = layer.find("name").text
self.attribution = layer.find("attribution").text
self.enabled = layer.find("enabled").text == "true"
self.default_style = Style(layer.find("defaultStyle"))
resource = layer.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
except HTTPError, e:
print e.geturl()
def __repr__(self):
return "Layer[%s]" % self.name
Update Layer to use ResourceInfo support class
|
from urllib2 import HTTPError
from geoserver.support import ResourceInfo, atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer(ResourceInfo):
resource_type = "layers"
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
name = self.metadata.find("name")
attribution = self.metadata.find("attribution")
enabled = self.metadata.find("enabled")
default_style = self.metadata.find("defaultStyle")
if name is not None:
self.name = name.text
else:
self.name = None
if attribution is not None:
self.attribution = attribution.text
else:
self.attribution = None
if enabled is not None and enabled.text == "true":
self.enabled = True
else:
self.enabled = False
if default_style is not None:
self.default_style = Style(default_style)
else:
self.default_style = None
resource = self.metadata.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
def __repr__(self):
return "Layer[%s]" % self.name
|
<commit_before>from urllib2 import HTTPError
from geoserver.support import atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer:
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
try:
layer = get_xml(self.href)
self.name = layer.find("name").text
self.attribution = layer.find("attribution").text
self.enabled = layer.find("enabled").text == "true"
self.default_style = Style(layer.find("defaultStyle"))
resource = layer.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
except HTTPError, e:
print e.geturl()
def __repr__(self):
return "Layer[%s]" % self.name
<commit_msg>Update Layer to use ResourceInfo support class<commit_after>
|
from urllib2 import HTTPError
from geoserver.support import ResourceInfo, atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer(ResourceInfo):
resource_type = "layers"
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
name = self.metadata.find("name")
attribution = self.metadata.find("attribution")
enabled = self.metadata.find("enabled")
default_style = self.metadata.find("defaultStyle")
if name is not None:
self.name = name.text
else:
self.name = None
if attribution is not None:
self.attribution = attribution.text
else:
self.attribution = None
if enabled is not None and enabled.text == "true":
self.enabled = True
else:
self.enabled = False
if default_style is not None:
self.default_style = Style(default_style)
else:
self.default_style = None
resource = self.metadata.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
def __repr__(self):
return "Layer[%s]" % self.name
|
from urllib2 import HTTPError
from geoserver.support import atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer:
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
try:
layer = get_xml(self.href)
self.name = layer.find("name").text
self.attribution = layer.find("attribution").text
self.enabled = layer.find("enabled").text == "true"
self.default_style = Style(layer.find("defaultStyle"))
resource = layer.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
except HTTPError, e:
print e.geturl()
def __repr__(self):
return "Layer[%s]" % self.name
Update Layer to use ResourceInfo support classfrom urllib2 import HTTPError
from geoserver.support import ResourceInfo, atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer(ResourceInfo):
resource_type = "layers"
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
name = self.metadata.find("name")
attribution = self.metadata.find("attribution")
enabled = self.metadata.find("enabled")
default_style = self.metadata.find("defaultStyle")
if name is not None:
self.name = name.text
else:
self.name = None
if attribution is not None:
self.attribution = attribution.text
else:
self.attribution = None
if enabled is not None and enabled.text == "true":
self.enabled = True
else:
self.enabled = False
if default_style is not None:
self.default_style = Style(default_style)
else:
self.default_style = None
resource = self.metadata.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
def __repr__(self):
return "Layer[%s]" % self.name
|
<commit_before>from urllib2 import HTTPError
from geoserver.support import atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer:
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
try:
layer = get_xml(self.href)
self.name = layer.find("name").text
self.attribution = layer.find("attribution").text
self.enabled = layer.find("enabled").text == "true"
self.default_style = Style(layer.find("defaultStyle"))
resource = layer.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
except HTTPError, e:
print e.geturl()
def __repr__(self):
return "Layer[%s]" % self.name
<commit_msg>Update Layer to use ResourceInfo support class<commit_after>from urllib2 import HTTPError
from geoserver.support import ResourceInfo, atom_link, get_xml
from geoserver.style import Style
from geoserver.resource import FeatureType, Coverage
class Layer(ResourceInfo):
resource_type = "layers"
def __init__(self, node):
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
name = self.metadata.find("name")
attribution = self.metadata.find("attribution")
enabled = self.metadata.find("enabled")
default_style = self.metadata.find("defaultStyle")
if name is not None:
self.name = name.text
else:
self.name = None
if attribution is not None:
self.attribution = attribution.text
else:
self.attribution = None
if enabled is not None and enabled.text == "true":
self.enabled = True
else:
self.enabled = False
if default_style is not None:
self.default_style = Style(default_style)
else:
self.default_style = None
resource = self.metadata.find("resource")
if resource and "class" in resource.attrib:
if resource.attrib["class"] == "featureType":
self.resource = FeatureType(resource)
elif resource.attrib["class"] == "coverage":
self.resource = Coverage(resource)
def __repr__(self):
return "Layer[%s]" % self.name
|
edf465bd80b20f151064ac39ba4d0c1cd9643e1d
|
stix2/test/v21/test_base.py
|
stix2/test/v21/test_base.py
|
import datetime as dt
import json
import pytest
import pytz
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
|
import datetime as dt
import json
import uuid
import pytest
import pytz
import stix2
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
def test_deterministic_id_unicode():
mutex = {'name': u'D*Fl#Ed*\u00a3\u00a8', 'type': 'mutex'}
obs = stix2.parse_observable(mutex, version="2.1")
dd_idx = obs.id.index("--")
id_uuid = uuid.UUID(obs.id[dd_idx+2:])
assert id_uuid.variant == uuid.RFC_4122
assert id_uuid.version == 5
|
Add a unit test for deterministic ID, with unicode
|
Add a unit test for deterministic ID, with unicode
|
Python
|
bsd-3-clause
|
oasis-open/cti-python-stix2
|
import datetime as dt
import json
import pytest
import pytz
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
Add a unit test for deterministic ID, with unicode
|
import datetime as dt
import json
import uuid
import pytest
import pytz
import stix2
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
def test_deterministic_id_unicode():
mutex = {'name': u'D*Fl#Ed*\u00a3\u00a8', 'type': 'mutex'}
obs = stix2.parse_observable(mutex, version="2.1")
dd_idx = obs.id.index("--")
id_uuid = uuid.UUID(obs.id[dd_idx+2:])
assert id_uuid.variant == uuid.RFC_4122
assert id_uuid.version == 5
|
<commit_before>import datetime as dt
import json
import pytest
import pytz
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
<commit_msg>Add a unit test for deterministic ID, with unicode<commit_after>
|
import datetime as dt
import json
import uuid
import pytest
import pytz
import stix2
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
def test_deterministic_id_unicode():
mutex = {'name': u'D*Fl#Ed*\u00a3\u00a8', 'type': 'mutex'}
obs = stix2.parse_observable(mutex, version="2.1")
dd_idx = obs.id.index("--")
id_uuid = uuid.UUID(obs.id[dd_idx+2:])
assert id_uuid.variant == uuid.RFC_4122
assert id_uuid.version == 5
|
import datetime as dt
import json
import pytest
import pytz
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
Add a unit test for deterministic ID, with unicodeimport datetime as dt
import json
import uuid
import pytest
import pytz
import stix2
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
def test_deterministic_id_unicode():
mutex = {'name': u'D*Fl#Ed*\u00a3\u00a8', 'type': 'mutex'}
obs = stix2.parse_observable(mutex, version="2.1")
dd_idx = obs.id.index("--")
id_uuid = uuid.UUID(obs.id[dd_idx+2:])
assert id_uuid.variant == uuid.RFC_4122
assert id_uuid.version == 5
|
<commit_before>import datetime as dt
import json
import pytest
import pytz
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
<commit_msg>Add a unit test for deterministic ID, with unicode<commit_after>import datetime as dt
import json
import uuid
import pytest
import pytz
import stix2
from stix2.base import STIXJSONEncoder
def test_encode_json_datetime():
now = dt.datetime(2017, 3, 22, 0, 0, 0, tzinfo=pytz.UTC)
test_dict = {'now': now}
expected = '{"now": "2017-03-22T00:00:00Z"}'
assert json.dumps(test_dict, cls=STIXJSONEncoder) == expected
def test_encode_json_object():
obj = object()
test_dict = {'obj': obj}
with pytest.raises(TypeError) as excinfo:
json.dumps(test_dict, cls=STIXJSONEncoder)
assert " is not JSON serializable" in str(excinfo.value)
def test_deterministic_id_unicode():
mutex = {'name': u'D*Fl#Ed*\u00a3\u00a8', 'type': 'mutex'}
obs = stix2.parse_observable(mutex, version="2.1")
dd_idx = obs.id.index("--")
id_uuid = uuid.UUID(obs.id[dd_idx+2:])
assert id_uuid.variant == uuid.RFC_4122
assert id_uuid.version == 5
|
3ec4b43e0665e940be9460788fa2d5bfb44b2929
|
portal/main.py
|
portal/main.py
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
portal = Portal.DynatracePortal(args.username, args.password)
portal.login()
for chart in tqdm.tqdm(args.chart_names):
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print("Initializing Phantom JS web driver")
portal = Portal.DynatracePortal(args.username, args.password)
print("Initialized Phantom JS web driver")
print("Logging in to Dynatrace portal")
portal.login()
print("Successfully logged in to Dynatrace portal")
for chart in tqdm.tqdm(args.chart_names):
tqdm.tqdm.write("Beginning to process chart: {}".format(chart))
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
Add more messages to commandline output
|
Add more messages to commandline output
|
Python
|
mit
|
josecolella/Dynatrace-Resources
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
portal = Portal.DynatracePortal(args.username, args.password)
portal.login()
for chart in tqdm.tqdm(args.chart_names):
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
Add more messages to commandline output
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print("Initializing Phantom JS web driver")
portal = Portal.DynatracePortal(args.username, args.password)
print("Initialized Phantom JS web driver")
print("Logging in to Dynatrace portal")
portal.login()
print("Successfully logged in to Dynatrace portal")
for chart in tqdm.tqdm(args.chart_names):
tqdm.tqdm.write("Beginning to process chart: {}".format(chart))
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
<commit_before>import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
portal = Portal.DynatracePortal(args.username, args.password)
portal.login()
for chart in tqdm.tqdm(args.chart_names):
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
<commit_msg>Add more messages to commandline output<commit_after>
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print("Initializing Phantom JS web driver")
portal = Portal.DynatracePortal(args.username, args.password)
print("Initialized Phantom JS web driver")
print("Logging in to Dynatrace portal")
portal.login()
print("Successfully logged in to Dynatrace portal")
for chart in tqdm.tqdm(args.chart_names):
tqdm.tqdm.write("Beginning to process chart: {}".format(chart))
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
portal = Portal.DynatracePortal(args.username, args.password)
portal.login()
for chart in tqdm.tqdm(args.chart_names):
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
Add more messages to commandline outputimport argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print("Initializing Phantom JS web driver")
portal = Portal.DynatracePortal(args.username, args.password)
print("Initialized Phantom JS web driver")
print("Logging in to Dynatrace portal")
portal.login()
print("Successfully logged in to Dynatrace portal")
for chart in tqdm.tqdm(args.chart_names):
tqdm.tqdm.write("Beginning to process chart: {}".format(chart))
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
<commit_before>import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
portal = Portal.DynatracePortal(args.username, args.password)
portal.login()
for chart in tqdm.tqdm(args.chart_names):
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
<commit_msg>Add more messages to commandline output<commit_after>import argparse
import logging
import Portal
import tqdm
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog="Dynatrace Synthetic Screenshot Automation")
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chart-names", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print("Initializing Phantom JS web driver")
portal = Portal.DynatracePortal(args.username, args.password)
print("Initialized Phantom JS web driver")
print("Logging in to Dynatrace portal")
portal.login()
print("Successfully logged in to Dynatrace portal")
for chart in tqdm.tqdm(args.chart_names):
tqdm.tqdm.write("Beginning to process chart: {}".format(chart))
portal.saveChartToScreenshot(
chartName=chart, specificElements=["tag", "svg", "class", "gwt-ScrollTable"], saveDir=args.directory)
tqdm.tqdm.write("Finished saving image: \"{chartName}\" screenshot to {directory} directory".format(
chartName=chart, directory=args.directory))
|
a9a121d5fe595f54ed482ec162dc7a9703a65c13
|
tp/__init__.py
|
tp/__init__.py
|
__import__('pkg_resources').declare_namespace(__name__)
try:
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
try:
__import__('pkg_resources').declare_namespace(__name__)
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
Fix for people without setuptools.
|
Fix for people without setuptools.
|
Python
|
lgpl-2.1
|
thousandparsec/libtpproto-py,thousandparsec/libtpproto-py
|
__import__('pkg_resources').declare_namespace(__name__)
try:
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
Fix for people without setuptools.
|
try:
__import__('pkg_resources').declare_namespace(__name__)
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
<commit_before>__import__('pkg_resources').declare_namespace(__name__)
try:
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
<commit_msg>Fix for people without setuptools.<commit_after>
|
try:
__import__('pkg_resources').declare_namespace(__name__)
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
__import__('pkg_resources').declare_namespace(__name__)
try:
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
Fix for people without setuptools.try:
__import__('pkg_resources').declare_namespace(__name__)
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
<commit_before>__import__('pkg_resources').declare_namespace(__name__)
try:
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
<commit_msg>Fix for people without setuptools.<commit_after>try:
__import__('pkg_resources').declare_namespace(__name__)
import modulefinder
for p in __path__:
modulefinder.AddPackagePath(__name__, p)
except Exception, e:
import warnings
warnings.warn(e, RuntimeWarning)
|
1d07bcd8a953b477275175b754d054a584dcdbcf
|
redditcrawl.py
|
redditcrawl.py
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
#print(dir(submission))
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0,", "\"id\": 0,\n\t\t\"submitted_by\": \""+submission.author.name+"\",")
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
Modify crawler to save name of user who contributed an entry
|
Modify crawler to save name of user who contributed an entry
|
Python
|
agpl-3.0
|
RolandR/place-atlas,RolandR/place-atlas,RolandR/place-atlas,RolandR/place-atlas
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
Modify crawler to save name of user who contributed an entry
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
#print(dir(submission))
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0,", "\"id\": 0,\n\t\t\"submitted_by\": \""+submission.author.name+"\",")
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
<commit_before>
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
<commit_msg>Modify crawler to save name of user who contributed an entry<commit_after>
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
#print(dir(submission))
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0,", "\"id\": 0,\n\t\t\"submitted_by\": \""+submission.author.name+"\",")
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
Modify crawler to save name of user who contributed an entry
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
#print(dir(submission))
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0,", "\"id\": 0,\n\t\t\"submitted_by\": \""+submission.author.name+"\",")
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
<commit_before>
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
<commit_msg>Modify crawler to save name of user who contributed an entry<commit_after>
#6y7LtOjoNEfe72g62kZfwtFHMWkQ8XsZvcQ8xZDe
import praw
outfile = open('temp.js', 'w')
credentials = open('credentials', 'r')
client_id = credentials.readline().strip(' \t\n\r')
client_secret = credentials.readline().strip(' \t\n\r')
startId = 466
reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent='atlas_bot')
for submission in reddit.subreddit('placeAtlas').new(limit=1000):
#print(dir(submission))
if(submission.link_flair_text == "New Entry"):
text = submission.selftext
text = text.replace("\"id\": 0,", "\"id\": 0,\n\t\t\"submitted_by\": \""+submission.author.name+"\",")
text = text.replace("\"id\": 0", "\"id\": "+str(startId))
startId = startId + 1
outfile.write(text+",")
print("written "+submission.title)
else:
print("skipped "+submission.title)
|
7ebadc3a1befa265dfc65e78dfbe98041b96d076
|
serial_com_test/raspberry_pi/test.py
|
serial_com_test/raspberry_pi/test.py
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
Update SERIAL_DEVICE to match the Raspberry Pi
|
Update SERIAL_DEVICE to match the Raspberry Pi
|
Python
|
mit
|
zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
Update SERIAL_DEVICE to match the Raspberry Pi
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
<commit_before>import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
<commit_msg>Update SERIAL_DEVICE to match the Raspberry Pi<commit_after>
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
Update SERIAL_DEVICE to match the Raspberry Piimport serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
<commit_before>import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
<commit_msg>Update SERIAL_DEVICE to match the Raspberry Pi<commit_after>import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
ba842af48c1d137584811d75d15c3b7ceddc2372
|
pychecker2/File.py
|
pychecker2/File.py
|
from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
from pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
Add more ways to suck line numbers from nodes
|
Add more ways to suck line numbers from nodes
|
Python
|
bsd-3-clause
|
mitar/pychecker,mitar/pychecker
|
from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
Add more ways to suck line numbers from nodes
|
from pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
<commit_before>from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
<commit_msg>Add more ways to suck line numbers from nodes<commit_after>
|
from pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
Add more ways to suck line numbers from nodesfrom pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
<commit_before>from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
<commit_msg>Add more ways to suck line numbers from nodes<commit_after>from pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
9c339d28ae899740281b085cbb2b8fd73425249c
|
democracy/views/label.py
|
democracy/views/label.py
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
Add empty line for PEP8
|
Add empty line for PEP8
|
Python
|
mit
|
City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
Add empty line for PEP8
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
<commit_before>from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
<commit_msg>Add empty line for PEP8<commit_after>
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
Add empty line for PEP8from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
<commit_before>from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
<commit_msg>Add empty line for PEP8<commit_after>from rest_framework import serializers, viewsets, filters
import django_filters
from democracy.models import Label
from democracy.pagination import DefaultLimitPagination
class LabelFilter(django_filters.FilterSet):
label = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = Label
fields = ['label']
class LabelSerializer(serializers.ModelSerializer):
class Meta:
model = Label
fields = ('id', 'label')
class LabelViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = LabelSerializer
queryset = Label.objects.all()
pagination_class = DefaultLimitPagination
filter_backends = (filters.DjangoFilterBackend,)
filter_class = LabelFilter
|
9ec35300975a141162749cba015cedbe900f97eb
|
idiotscript/Collector.py
|
idiotscript/Collector.py
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._current_group.append(new_input)
def finalise_group(self):
self._groups.append(self._current_group)
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._groups.append(self._current_group)
self._current_group.append(new_input)
def finalise_group(self):
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
Fix bug with collector losing last group of input
|
Fix bug with collector losing last group of input
This means the script runner doesn't have to manually finalise the last
input, which was always a bit silly. In fact, the whole metaphor is
rather silly. I should change it to be "start new group" instead.
|
Python
|
unlicense
|
djmattyg007/IdiotScript
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._current_group.append(new_input)
def finalise_group(self):
self._groups.append(self._current_group)
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
Fix bug with collector losing last group of input
This means the script runner doesn't have to manually finalise the last
input, which was always a bit silly. In fact, the whole metaphor is
rather silly. I should change it to be "start new group" instead.
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._groups.append(self._current_group)
self._current_group.append(new_input)
def finalise_group(self):
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
<commit_before>class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._current_group.append(new_input)
def finalise_group(self):
self._groups.append(self._current_group)
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
<commit_msg>Fix bug with collector losing last group of input
This means the script runner doesn't have to manually finalise the last
input, which was always a bit silly. In fact, the whole metaphor is
rather silly. I should change it to be "start new group" instead.<commit_after>
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._groups.append(self._current_group)
self._current_group.append(new_input)
def finalise_group(self):
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._current_group.append(new_input)
def finalise_group(self):
self._groups.append(self._current_group)
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
Fix bug with collector losing last group of input
This means the script runner doesn't have to manually finalise the last
input, which was always a bit silly. In fact, the whole metaphor is
rather silly. I should change it to be "start new group" instead.class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._groups.append(self._current_group)
self._current_group.append(new_input)
def finalise_group(self):
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
<commit_before>class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._current_group.append(new_input)
def finalise_group(self):
self._groups.append(self._current_group)
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
<commit_msg>Fix bug with collector losing last group of input
This means the script runner doesn't have to manually finalise the last
input, which was always a bit silly. In fact, the whole metaphor is
rather silly. I should change it to be "start new group" instead.<commit_after>class Collector(object):
def __init__(self):
self._groups = []
self._current_group = None
def add_input(self, new_input):
if self._current_group is None:
self._current_group = []
self._groups.append(self._current_group)
self._current_group.append(new_input)
def finalise_group(self):
self._current_group = None
@property
def groups(self):
return self._groups
@property
def current_group(self):
return self._current_group
def __str__(self):
temp = ""
for group in self._groups:
for text in group:
temp += text + "\n"
temp += "\n"
return temp
|
6d5e80771f04fe2aa7cb83c89bdb4e16178b219b
|
DB.py
|
DB.py
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
cur.execute('''DROP TABLE IF EXISTS PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
Drop existing products table before creation
|
Drop existing products table before creation
|
Python
|
mit
|
joykuotw/python-endpoints,joykuotw/python-endpoints,joykuotw/python-endpoints
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')Drop existing products table before creation
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
cur.execute('''DROP TABLE IF EXISTS PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
<commit_before># Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')<commit_msg>Drop existing products table before creation<commit_after>
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
cur.execute('''DROP TABLE IF EXISTS PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')Drop existing products table before creation# Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
cur.execute('''DROP TABLE IF EXISTS PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
<commit_before># Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')<commit_msg>Drop existing products table before creation<commit_after># Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
try:
conn = sqlite3.connect('databaseForTest.db')
print 'DB Creation Successful!'
cur = conn.cursor()
cur.execute('''DROP TABLE IF EXISTS PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
except Exception as err:
print "Error: ", err
c = createDB().readCSV('products.csv')
|
8e0afc06d221d86677a172fdb7d1388225504ba6
|
resp/__main__.py
|
resp/__main__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
Add specific required-property to all arguments
|
Add specific required-property to all arguments
|
Python
|
mit
|
nok/resp,nok/resp
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
Add specific required-property to all arguments
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Add specific required-property to all arguments<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
Add specific required-property to all arguments#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Add specific required-property to all arguments<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
8e603328ff08888a1236e6b8ca0adbeb8bae819b
|
ckanext/ckanext-apply_permissions_for_service/ckanext/apply_permissions_for_service/logic.py
|
ckanext/ckanext-apply_permissions_for_service/ckanext/apply_permissions_for_service/logic.py
|
from ckan.plugins import toolkit as tk
import model
import ckan.model as ckan_model
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
organization = data_dict.get('organization')
vat_id = data_dict.get('vat_id')
contact_person_name = data_dict.get('contact_person_name')
contact_person_email = data_dict.get('contact_person_email')
ip_address_list = data_dict.get('ip_address_list')
subsystem_code = data_dict.get('subsystem_code')
api_id = data_dict.get('api_id')
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
from ckan.plugins import toolkit as tk
import model
_ = tk._
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
errors = {}
error_summary = {}
organization = data_dict.get('organization')
if organization is None:
errors['organization'] = _('Missing value')
vat_id = data_dict.get('vat_id')
if vat_id is None:
errors['vat_id'] = _('Missing value')
contact_person_name = data_dict.get('contact_person_name')
if contact_person_name is None:
errors['contact_person_name'] = _('Missing value')
contact_person_email = data_dict.get('contact_person_email')
if contact_person_email is None:
errors['contact_person_email'] = _('Missing value')
ip_address_list = data_dict.get('ip_address_list')
if ip_address_list is None:
errors['ip_address_list'] = _('Missing value')
subsystem_code = data_dict.get('subsystem_code')
if subsystem_code is None:
errors['subsystem_code'] = _('Missing value')
api_id = data_dict.get('api_id')
if api_id is None:
errors['api_id'] = _('Missing value')
if errors:
raise tk.ValidationError(errors)
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
Add validation to api for missing required values
|
LIKA-106: Add validation to api for missing required values
|
Python
|
mit
|
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
|
from ckan.plugins import toolkit as tk
import model
import ckan.model as ckan_model
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
organization = data_dict.get('organization')
vat_id = data_dict.get('vat_id')
contact_person_name = data_dict.get('contact_person_name')
contact_person_email = data_dict.get('contact_person_email')
ip_address_list = data_dict.get('ip_address_list')
subsystem_code = data_dict.get('subsystem_code')
api_id = data_dict.get('api_id')
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
LIKA-106: Add validation to api for missing required values
|
from ckan.plugins import toolkit as tk
import model
_ = tk._
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
errors = {}
error_summary = {}
organization = data_dict.get('organization')
if organization is None:
errors['organization'] = _('Missing value')
vat_id = data_dict.get('vat_id')
if vat_id is None:
errors['vat_id'] = _('Missing value')
contact_person_name = data_dict.get('contact_person_name')
if contact_person_name is None:
errors['contact_person_name'] = _('Missing value')
contact_person_email = data_dict.get('contact_person_email')
if contact_person_email is None:
errors['contact_person_email'] = _('Missing value')
ip_address_list = data_dict.get('ip_address_list')
if ip_address_list is None:
errors['ip_address_list'] = _('Missing value')
subsystem_code = data_dict.get('subsystem_code')
if subsystem_code is None:
errors['subsystem_code'] = _('Missing value')
api_id = data_dict.get('api_id')
if api_id is None:
errors['api_id'] = _('Missing value')
if errors:
raise tk.ValidationError(errors)
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
<commit_before>from ckan.plugins import toolkit as tk
import model
import ckan.model as ckan_model
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
organization = data_dict.get('organization')
vat_id = data_dict.get('vat_id')
contact_person_name = data_dict.get('contact_person_name')
contact_person_email = data_dict.get('contact_person_email')
ip_address_list = data_dict.get('ip_address_list')
subsystem_code = data_dict.get('subsystem_code')
api_id = data_dict.get('api_id')
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
<commit_msg>LIKA-106: Add validation to api for missing required values<commit_after>
|
from ckan.plugins import toolkit as tk
import model
_ = tk._
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
errors = {}
error_summary = {}
organization = data_dict.get('organization')
if organization is None:
errors['organization'] = _('Missing value')
vat_id = data_dict.get('vat_id')
if vat_id is None:
errors['vat_id'] = _('Missing value')
contact_person_name = data_dict.get('contact_person_name')
if contact_person_name is None:
errors['contact_person_name'] = _('Missing value')
contact_person_email = data_dict.get('contact_person_email')
if contact_person_email is None:
errors['contact_person_email'] = _('Missing value')
ip_address_list = data_dict.get('ip_address_list')
if ip_address_list is None:
errors['ip_address_list'] = _('Missing value')
subsystem_code = data_dict.get('subsystem_code')
if subsystem_code is None:
errors['subsystem_code'] = _('Missing value')
api_id = data_dict.get('api_id')
if api_id is None:
errors['api_id'] = _('Missing value')
if errors:
raise tk.ValidationError(errors)
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
from ckan.plugins import toolkit as tk
import model
import ckan.model as ckan_model
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
organization = data_dict.get('organization')
vat_id = data_dict.get('vat_id')
contact_person_name = data_dict.get('contact_person_name')
contact_person_email = data_dict.get('contact_person_email')
ip_address_list = data_dict.get('ip_address_list')
subsystem_code = data_dict.get('subsystem_code')
api_id = data_dict.get('api_id')
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
LIKA-106: Add validation to api for missing required valuesfrom ckan.plugins import toolkit as tk
import model
_ = tk._
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
errors = {}
error_summary = {}
organization = data_dict.get('organization')
if organization is None:
errors['organization'] = _('Missing value')
vat_id = data_dict.get('vat_id')
if vat_id is None:
errors['vat_id'] = _('Missing value')
contact_person_name = data_dict.get('contact_person_name')
if contact_person_name is None:
errors['contact_person_name'] = _('Missing value')
contact_person_email = data_dict.get('contact_person_email')
if contact_person_email is None:
errors['contact_person_email'] = _('Missing value')
ip_address_list = data_dict.get('ip_address_list')
if ip_address_list is None:
errors['ip_address_list'] = _('Missing value')
subsystem_code = data_dict.get('subsystem_code')
if subsystem_code is None:
errors['subsystem_code'] = _('Missing value')
api_id = data_dict.get('api_id')
if api_id is None:
errors['api_id'] = _('Missing value')
if errors:
raise tk.ValidationError(errors)
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
<commit_before>from ckan.plugins import toolkit as tk
import model
import ckan.model as ckan_model
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
organization = data_dict.get('organization')
vat_id = data_dict.get('vat_id')
contact_person_name = data_dict.get('contact_person_name')
contact_person_email = data_dict.get('contact_person_email')
ip_address_list = data_dict.get('ip_address_list')
subsystem_code = data_dict.get('subsystem_code')
api_id = data_dict.get('api_id')
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
<commit_msg>LIKA-106: Add validation to api for missing required values<commit_after>from ckan.plugins import toolkit as tk
import model
_ = tk._
def service_permission_application_create(context, data_dict):
tk.check_access('service_permission_application_create', context, data_dict)
errors = {}
error_summary = {}
organization = data_dict.get('organization')
if organization is None:
errors['organization'] = _('Missing value')
vat_id = data_dict.get('vat_id')
if vat_id is None:
errors['vat_id'] = _('Missing value')
contact_person_name = data_dict.get('contact_person_name')
if contact_person_name is None:
errors['contact_person_name'] = _('Missing value')
contact_person_email = data_dict.get('contact_person_email')
if contact_person_email is None:
errors['contact_person_email'] = _('Missing value')
ip_address_list = data_dict.get('ip_address_list')
if ip_address_list is None:
errors['ip_address_list'] = _('Missing value')
subsystem_code = data_dict.get('subsystem_code')
if subsystem_code is None:
errors['subsystem_code'] = _('Missing value')
api_id = data_dict.get('api_id')
if api_id is None:
errors['api_id'] = _('Missing value')
if errors:
raise tk.ValidationError(errors)
request_description = data_dict.get('api_id')
model.ApplyPermission.create(organization=organization, vat_id=vat_id,
contact_person_name=contact_person_name,
contact_person_email=contact_person_email,
ip_address_list=ip_address_list,
subsystem_code=subsystem_code,
api_id=api_id,
request_description=request_description)
|
56441d42ed87e2adad8b36c25cf695b0747a8c16
|
tests/djworkflows/models.py
|
tests/djworkflows/models.py
|
from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
|
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
Use imports from django_xworkflows instead of imports from xworkflows in tests
|
Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polyconseil.fr>
|
Python
|
bsd-2-clause
|
rbarrois/django_xworkflows
|
from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polyconseil.fr>
|
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
<commit_before>from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
<commit_msg>Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polyconseil.fr><commit_after>
|
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polyconseil.fr>from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
<commit_before>from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
<commit_msg>Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polyconseil.fr><commit_after>from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
83a16ba4485f3e483adc20352cb0cef7c02f8ef2
|
tests/test_config_schema.py
|
tests/test_config_schema.py
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from flexget import plugin
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_plugin_schemas_are_valid(self):
for p in plugin.plugins.values():
if p.schema is None:
continue
try:
config_schema.SchemaValidator.check_schema(p.schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
p.name, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_registered_schemas_are_valid(self):
for path in config_schema.schema_paths:
schema = config_schema.resolve_ref(path)
try:
config_schema.SchemaValidator.check_schema(schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
path, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
Convert unit test to test all registered schemas instead of plugins directly.
|
Convert unit test to test all registered schemas instead of plugins directly.
|
Python
|
mit
|
vfrc2/Flexget,poulpito/Flexget,jacobmetrick/Flexget,ibrahimkarahan/Flexget,Danfocus/Flexget,oxc/Flexget,ibrahimkarahan/Flexget,asm0dey/Flexget,qvazzler/Flexget,dsemi/Flexget,crawln45/Flexget,thalamus/Flexget,tsnoam/Flexget,patsissons/Flexget,tsnoam/Flexget,vfrc2/Flexget,Danfocus/Flexget,drwyrm/Flexget,v17al/Flexget,grrr2/Flexget,lildadou/Flexget,OmgOhnoes/Flexget,voriux/Flexget,crawln45/Flexget,ianstalk/Flexget,Danfocus/Flexget,spencerjanssen/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,tobinjt/Flexget,thalamus/Flexget,ratoaq2/Flexget,gazpachoking/Flexget,qk4l/Flexget,grrr2/Flexget,lildadou/Flexget,tarzasai/Flexget,lildadou/Flexget,OmgOhnoes/Flexget,ZefQ/Flexget,antivirtel/Flexget,vfrc2/Flexget,Flexget/Flexget,qvazzler/Flexget,tobinjt/Flexget,patsissons/Flexget,xfouloux/Flexget,JorisDeRieck/Flexget,camon/Flexget,jawilson/Flexget,qk4l/Flexget,offbyone/Flexget,tarzasai/Flexget,poulpito/Flexget,sean797/Flexget,camon/Flexget,jacobmetrick/Flexget,Pretagonist/Flexget,tobinjt/Flexget,thalamus/Flexget,tsnoam/Flexget,tarzasai/Flexget,ratoaq2/Flexget,Pretagonist/Flexget,ianstalk/Flexget,ibrahimkarahan/Flexget,ZefQ/Flexget,gazpachoking/Flexget,grrr2/Flexget,Flexget/Flexget,malkavi/Flexget,Flexget/Flexget,spencerjanssen/Flexget,tvcsantos/Flexget,X-dark/Flexget,Flexget/Flexget,LynxyssCZ/Flexget,v17al/Flexget,asm0dey/Flexget,dsemi/Flexget,drwyrm/Flexget,tobinjt/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,oxc/Flexget,cvium/Flexget,qk4l/Flexget,antivirtel/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,cvium/Flexget,tvcsantos/Flexget,ianstalk/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,X-dark/Flexget,patsissons/Flexget,antivirtel/Flexget,LynxyssCZ/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,jacobmetrick/Flexget,crawln45/Flexget,cvium/Flexget,jawilson/Flexget,jawilson/Flexget,oxc/Flexget,Danfocus/Flexget,xfouloux/Flexget,asm0dey/Flexget,crawln45/Flexget,spencerjanssen/Flexget,dsemi/Flexget,X-dark/Flexget,malkavi/Flexget,v17al/Flexget,ZefQ/Flexget,Pretagonist/Flexget,OmgOhnoes/Flexget,xfouloux/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,sean797/Flexget,qvazzler/Flexget,voriux/Flexget,sean797/Flexget
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from flexget import plugin
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_plugin_schemas_are_valid(self):
for p in plugin.plugins.values():
if p.schema is None:
continue
try:
config_schema.SchemaValidator.check_schema(p.schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
p.name, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
Convert unit test to test all registered schemas instead of plugins directly.
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_registered_schemas_are_valid(self):
for path in config_schema.schema_paths:
schema = config_schema.resolve_ref(path)
try:
config_schema.SchemaValidator.check_schema(schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
path, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from flexget import plugin
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_plugin_schemas_are_valid(self):
for p in plugin.plugins.values():
if p.schema is None:
continue
try:
config_schema.SchemaValidator.check_schema(p.schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
p.name, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
<commit_msg>Convert unit test to test all registered schemas instead of plugins directly.<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_registered_schemas_are_valid(self):
for path in config_schema.schema_paths:
schema = config_schema.resolve_ref(path)
try:
config_schema.SchemaValidator.check_schema(schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
path, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from flexget import plugin
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_plugin_schemas_are_valid(self):
for p in plugin.plugins.values():
if p.schema is None:
continue
try:
config_schema.SchemaValidator.check_schema(p.schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
p.name, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
Convert unit test to test all registered schemas instead of plugins directly.from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_registered_schemas_are_valid(self):
for path in config_schema.schema_paths:
schema = config_schema.resolve_ref(path)
try:
config_schema.SchemaValidator.check_schema(schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
path, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from flexget import plugin
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_plugin_schemas_are_valid(self):
for p in plugin.plugins.values():
if p.schema is None:
continue
try:
config_schema.SchemaValidator.check_schema(p.schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
p.name, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
<commit_msg>Convert unit test to test all registered schemas instead of plugins directly.<commit_after>from __future__ import unicode_literals, division, absolute_import
import jsonschema
from flexget import config_schema
from tests import FlexGetBase
class TestSchemaValidator(FlexGetBase):
def test_registered_schemas_are_valid(self):
for path in config_schema.schema_paths:
schema = config_schema.resolve_ref(path)
try:
config_schema.SchemaValidator.check_schema(schema)
except jsonschema.SchemaError as e:
assert False, 'plugin `%s` has an invalid schema. %s %s' % (
path, '/'.join(str(p) for p in e.path), e.message)
def test_resolves_local_refs(self):
schema = {'$ref': '/schema/plugin/accept_all'}
v = config_schema.SchemaValidator(schema)
# accept_all schema should be for type boolean
assert v.is_valid(True)
assert not v.is_valid(14)
def test_custom_format_checker(self):
schema = {'type': 'string', 'format': 'quality'}
v = config_schema.SchemaValidator(schema)
assert v.is_valid('720p')
assert not v.is_valid('aoeu')
|
bc071a524d1695e6d95b42709442dddaf4185cd9
|
account_invoice_start_end_dates/__manifest__.py
|
account_invoice_start_end_dates/__manifest__.py
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"maintainers": ["alexis-via"],
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
FIX visibility of forecast button
|
FIX visibility of forecast button
Default value for cutoff date is end date of previous fiscal year
|
Python
|
agpl-3.0
|
OCA/account-closing,OCA/account-closing
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
FIX visibility of forecast button
Default value for cutoff date is end date of previous fiscal year
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"maintainers": ["alexis-via"],
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
<commit_before># Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
<commit_msg>FIX visibility of forecast button
Default value for cutoff date is end date of previous fiscal year<commit_after>
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"maintainers": ["alexis-via"],
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
FIX visibility of forecast button
Default value for cutoff date is end date of previous fiscal year# Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"maintainers": ["alexis-via"],
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
<commit_before># Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
<commit_msg>FIX visibility of forecast button
Default value for cutoff date is end date of previous fiscal year<commit_after># Copyright 2016-2019 Akretion France
# Copyright 2018-2019 Camptocamp
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Start End Dates",
"version": "13.0.1.0.0",
"category": "Accounting & Finance",
"license": "AGPL-3",
"summary": "Adds start/end dates on invoice/move lines",
"author": "Akretion,Odoo Community Association (OCA)",
"maintainers": ["alexis-via"],
"website": "https://github.com/OCA/account-closing",
"depends": ["account",],
"data": ["views/account_move.xml", "views/product.xml",],
"demo": ["demo/product_demo.xml"],
"installable": True,
}
|
f8dd1fd8ee899c0147a9a88149097e9b7cd68f01
|
tests/generic_views/views.py
|
tests/generic_views/views.py
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
templated_email_recipient_form_field = 'email'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
Remove unecessary attribute from test
|
Remove unecessary attribute from test
|
Python
|
mit
|
BradWhittington/django-templated-email,BradWhittington/django-templated-email,vintasoftware/django-templated-email,vintasoftware/django-templated-email
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
templated_email_recipient_form_field = 'email'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
Remove unecessary attribute from test
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
<commit_before>from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
templated_email_recipient_form_field = 'email'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
<commit_msg>Remove unecessary attribute from test<commit_after>
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
templated_email_recipient_form_field = 'email'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
Remove unecessary attribute from testfrom django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
<commit_before>from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
templated_email_recipient_form_field = 'email'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
<commit_msg>Remove unecessary attribute from test<commit_after>from django.views.generic.edit import CreateView
from templated_email.generic_views import TemplatedEmailFormViewMixin
from tests.generic_views.models import Author
# This view send a welcome email to the author
class AuthorCreateView(TemplatedEmailFormViewMixin, CreateView):
model = Author
fields = ['name', 'email']
templated_email_template_name = 'welcome'
template_name = 'authors/create_author.html'
success_url = '/create_author/'
def templated_email_get_recipients(self, form):
return [form.data['email']]
|
05ba498867ff16c4221dcd758d5cdef9ee884b27
|
modules/test_gitdata.py
|
modules/test_gitdata.py
|
from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
|
import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
|
Convert GitData tests to a unittest suite
|
Convert GitData tests to a unittest suite
|
Python
|
bsd-2-clause
|
OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api
|
from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
Convert GitData tests to a unittest suite
|
import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
|
<commit_before>from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
<commit_msg>Convert GitData tests to a unittest suite<commit_after>
|
import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
|
from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
Convert GitData tests to a unittest suiteimport unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
|
<commit_before>from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
<commit_msg>Convert GitData tests to a unittest suite<commit_after>import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
|
a05372ad910900ec2ef89bb10d4a0759c9bcd437
|
app.py
|
app.py
|
import os
from flask import Flask, request, redirect, session
import twilio.twiml
from twilio.rest import TwilioRestClient
from charity import Charity
SECRET_KEY = os.environ['DONATION_SECRET_KEY']
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.values.get('From', None)
client = TwilioRestClient()
charity = Charity()
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the message!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
import os
from flask import Flask, request
import twilio.twiml
from twilio.rest import TwilioRestClient
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.args.get('From')
text_content = request.args.get('Body').lower()
client = TwilioRestClient(os.environ['TWILIO_ACCOUNT_SID'],
os.environ['TWILIO_AUTH_TOKEN'])
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the donation!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Test sending a fresh message
|
Test sending a fresh message
|
Python
|
mit
|
DanielleSucher/Text-Donation
|
import os
from flask import Flask, request, redirect, session
import twilio.twiml
from twilio.rest import TwilioRestClient
from charity import Charity
SECRET_KEY = os.environ['DONATION_SECRET_KEY']
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.values.get('From', None)
client = TwilioRestClient()
charity = Charity()
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the message!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Test sending a fresh message
|
import os
from flask import Flask, request
import twilio.twiml
from twilio.rest import TwilioRestClient
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.args.get('From')
text_content = request.args.get('Body').lower()
client = TwilioRestClient(os.environ['TWILIO_ACCOUNT_SID'],
os.environ['TWILIO_AUTH_TOKEN'])
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the donation!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import os
from flask import Flask, request, redirect, session
import twilio.twiml
from twilio.rest import TwilioRestClient
from charity import Charity
SECRET_KEY = os.environ['DONATION_SECRET_KEY']
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.values.get('From', None)
client = TwilioRestClient()
charity = Charity()
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the message!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Test sending a fresh message<commit_after>
|
import os
from flask import Flask, request
import twilio.twiml
from twilio.rest import TwilioRestClient
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.args.get('From')
text_content = request.args.get('Body').lower()
client = TwilioRestClient(os.environ['TWILIO_ACCOUNT_SID'],
os.environ['TWILIO_AUTH_TOKEN'])
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the donation!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
import os
from flask import Flask, request, redirect, session
import twilio.twiml
from twilio.rest import TwilioRestClient
from charity import Charity
SECRET_KEY = os.environ['DONATION_SECRET_KEY']
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.values.get('From', None)
client = TwilioRestClient()
charity = Charity()
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the message!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Test sending a fresh messageimport os
from flask import Flask, request
import twilio.twiml
from twilio.rest import TwilioRestClient
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.args.get('From')
text_content = request.args.get('Body').lower()
client = TwilioRestClient(os.environ['TWILIO_ACCOUNT_SID'],
os.environ['TWILIO_AUTH_TOKEN'])
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the donation!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import os
from flask import Flask, request, redirect, session
import twilio.twiml
from twilio.rest import TwilioRestClient
from charity import Charity
SECRET_KEY = os.environ['DONATION_SECRET_KEY']
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.values.get('From', None)
client = TwilioRestClient()
charity = Charity()
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the message!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Test sending a fresh message<commit_after>import os
from flask import Flask, request
import twilio.twiml
from twilio.rest import TwilioRestClient
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
from_number = request.args.get('From')
text_content = request.args.get('Body').lower()
client = TwilioRestClient(os.environ['TWILIO_ACCOUNT_SID'],
os.environ['TWILIO_AUTH_TOKEN'])
client.sms.messages.create(to="+17187535039",
from_=from_number,
body="fresh message!")
message = from_number + ", thanks for the donation!"
resp = twilio.twiml.Response()
resp.sms(message)
return str(resp)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
94e0e31a8329cbbdc1545fa5c12b04600422627f
|
main.py
|
main.py
|
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
try:
from package_control import events
except ImportError:
pass
else:
if events.post_upgrade(__package__):
# clean up sys.modules to ensure all submodules are reloaded
import sys
modules_to_clear = set()
for module_name in sys.modules:
if module_name.startswith(__package__):
modules_to_clear.add(module_name)
print("[{}] Cleaning up {} cached modules after update…"
.format(__package__, len(modules_to_clear)))
for module_name in modules_to_clear:
del sys.modules[module_name]
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
Add code to remove cached sub-modules on upgrade
|
Add code to remove cached sub-modules on upgrade
This is untested because I'm too lazy to simulate a package updating
situation, but I generally believe it should work. It shouldn't break
anything, at least.
|
Python
|
mit
|
SublimeText/AAAPackageDev,SublimeText/PackageDev,SublimeText/AAAPackageDev
|
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
Add code to remove cached sub-modules on upgrade
This is untested because I'm too lazy to simulate a package updating
situation, but I generally believe it should work. It shouldn't break
anything, at least.
|
try:
from package_control import events
except ImportError:
pass
else:
if events.post_upgrade(__package__):
# clean up sys.modules to ensure all submodules are reloaded
import sys
modules_to_clear = set()
for module_name in sys.modules:
if module_name.startswith(__package__):
modules_to_clear.add(module_name)
print("[{}] Cleaning up {} cached modules after update…"
.format(__package__, len(modules_to_clear)))
for module_name in modules_to_clear:
del sys.modules[module_name]
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
<commit_before># Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
<commit_msg>Add code to remove cached sub-modules on upgrade
This is untested because I'm too lazy to simulate a package updating
situation, but I generally believe it should work. It shouldn't break
anything, at least.<commit_after>
|
try:
from package_control import events
except ImportError:
pass
else:
if events.post_upgrade(__package__):
# clean up sys.modules to ensure all submodules are reloaded
import sys
modules_to_clear = set()
for module_name in sys.modules:
if module_name.startswith(__package__):
modules_to_clear.add(module_name)
print("[{}] Cleaning up {} cached modules after update…"
.format(__package__, len(modules_to_clear)))
for module_name in modules_to_clear:
del sys.modules[module_name]
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
Add code to remove cached sub-modules on upgrade
This is untested because I'm too lazy to simulate a package updating
situation, but I generally believe it should work. It shouldn't break
anything, at least.try:
from package_control import events
except ImportError:
pass
else:
if events.post_upgrade(__package__):
# clean up sys.modules to ensure all submodules are reloaded
import sys
modules_to_clear = set()
for module_name in sys.modules:
if module_name.startswith(__package__):
modules_to_clear.add(module_name)
print("[{}] Cleaning up {} cached modules after update…"
.format(__package__, len(modules_to_clear)))
for module_name in modules_to_clear:
del sys.modules[module_name]
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
<commit_before># Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
<commit_msg>Add code to remove cached sub-modules on upgrade
This is untested because I'm too lazy to simulate a package updating
situation, but I generally believe it should work. It shouldn't break
anything, at least.<commit_after>try:
from package_control import events
except ImportError:
pass
else:
if events.post_upgrade(__package__):
# clean up sys.modules to ensure all submodules are reloaded
import sys
modules_to_clear = set()
for module_name in sys.modules:
if module_name.startswith(__package__):
modules_to_clear.add(module_name)
print("[{}] Cleaning up {} cached modules after update…"
.format(__package__, len(modules_to_clear)))
for module_name in modules_to_clear:
del sys.modules[module_name]
# Must be named "plugins_"
# because sublime_plugin claims a plugin module's `plugin` attribute for itself.
from .plugins_ import * # noqa
|
35a2e4ecfc7c39ca477279a49d1a49bb4395b7ad
|
main.py
|
main.py
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle can't be deciphered.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle isn't correctly engraved.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
Make a better error message for ValidationError
|
Make a better error message for ValidationError
|
Python
|
unlicense
|
elwinar/chronicler
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle can't be deciphered.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
Make a better error message for ValidationError
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle isn't correctly engraved.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
<commit_before>"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle can't be deciphered.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
<commit_msg>Make a better error message for ValidationError<commit_after>
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle isn't correctly engraved.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle can't be deciphered.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
Make a better error message for ValidationError"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle isn't correctly engraved.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
<commit_before>"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle can't be deciphered.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
<commit_msg>Make a better error message for ValidationError<commit_after>"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.hjson]
"""
import docopt
import hjson
import jsonschema
import chronicle
def main():
options = docopt.docopt(__doc__)
try:
c = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
c = hjson.load(c)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
try:
jsonschema.validate(c, chronicle.schema)
except jsonschema.ValidationError as e:
print("This chronicle isn't correctly engraved.")
print("%s: %s" % (list(e.path), e.message))
exit(1)
print("Behold my story:")
played = 0
won = 0
for h in c:
for a in h['against']:
played += 1
if a['result']['victory'] == True:
won += 1
print("victories: %d/%d" % (won, played))
if __name__ == '__main__':
main()
|
968274deace1aa16d45df350c437eab699d02b16
|
byceps/services/brand/transfer/models.py
|
byceps/services/brand/transfer/models.py
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: str
image_url_path: str
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: Optional[str]
image_url_path: Optional[str]
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
Fix type hints of brand DTO image fields
|
Fix type hints of brand DTO image fields
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: str
image_url_path: str
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
Fix type hints of brand DTO image fields
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: Optional[str]
image_url_path: Optional[str]
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
<commit_before>"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: str
image_url_path: str
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
<commit_msg>Fix type hints of brand DTO image fields<commit_after>
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: Optional[str]
image_url_path: Optional[str]
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: str
image_url_path: str
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
Fix type hints of brand DTO image fields"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: Optional[str]
image_url_path: Optional[str]
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
<commit_before>"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: str
image_url_path: str
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
<commit_msg>Fix type hints of brand DTO image fields<commit_after>"""
byceps.services.brand.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import BrandID
@dataclass(frozen=True)
class Brand:
id: BrandID
title: str
image_filename: Optional[str]
image_url_path: Optional[str]
@dataclass(frozen=True)
class BrandSetting:
brand_id: BrandID
name: str
value: str
|
7d7c732f0a2d4f326b7bd760c3c02814848914e5
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.0',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.1',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.1',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
Bump version due to PyPI submit error caused by server outage.
|
Bump version due to PyPI submit error caused by server outage.
|
Python
|
mit
|
BlasiusVonSzerencsi/pagerduty-events-api
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.0',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
Bump version due to PyPI submit error caused by server outage.
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.1',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.1',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
<commit_before>from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.0',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
<commit_msg>Bump version due to PyPI submit error caused by server outage.<commit_after>
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.1',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.1',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.0',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
Bump version due to PyPI submit error caused by server outage.from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.1',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.1',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
<commit_before>from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.0',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
<commit_msg>Bump version due to PyPI submit error caused by server outage.<commit_after>from setuptools import setup
setup(name='pagerduty_events_api',
version='0.2.1',
description='Python wrapper for Pagerduty Events API',
url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api',
download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.1',
author='Balazs Szerencsi',
author_email='balazs.szerencsi@icloud.com',
license='MIT',
packages=['pagerduty_events_api'],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose', 'ddt'],
install_requires=['requests'],
keywords=['pagerduty', 'event', 'api', 'incident', 'trigger', 'acknowledge', 'resolve'])
|
425056e6196dbce50f08d94f1578a2984b8a1c21
|
setup.py
|
setup.py
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
long_description=open('README.md').read(),
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
Read in README.md as long description
|
Read in README.md as long description
|
Python
|
apache-2.0
|
forseti-security/resource-policy-evaluation-library
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
Read in README.md as long description
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
long_description=open('README.md').read(),
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
<commit_before># Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
<commit_msg>Read in README.md as long description<commit_after>
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
long_description=open('README.md').read(),
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
Read in README.md as long description# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
long_description=open('README.md').read(),
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
<commit_before># Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
<commit_msg>Read in README.md as long description<commit_after># Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from setuptools import setup
setup(
name="rpe-lib",
description="A resource policy evaluation library",
long_description=open('README.md').read(),
author="Joe Ceresini",
url="https://github.com/forseti-security/resource-policy-evaluation-library",
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires=[
'google-api-python-client',
'google-api-python-client-helpers',
'tenacity',
],
packages=[
'rpe',
'rpe.engines',
'rpe.resources',
],
package_data={},
license="Apache 2.0",
keywords="gcp policy enforcement",
)
|
237abb4204821e6e90f17a438b8945d7b47b3406
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1',
'six >= 1.4.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
Remove six from dependency list.
|
Remove six from dependency list.
|
Python
|
mit
|
al45tair/dmgbuild
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1',
'six >= 1.4.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
Remove six from dependency list.
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1',
'six >= 1.4.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
<commit_msg>Remove six from dependency list.<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1',
'six >= 1.4.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
Remove six from dependency list.# -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1',
'six >= 1.4.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
<commit_msg>Remove six from dependency list.<commit_after># -*- coding: utf-8 -*-
import sys
from setuptools import setup
with open('README.rst', 'rb') as f:
long_desc = f.read().decode('utf-8')
# We have to be able to install on Linux to build the docs, even though
# dmgbuild presently won't work there because there's no SetFile
requires=['ds_store >= 1.1.0',
'mac_alias >= 2.0.1']
if sys.platform.startswith('darwin'):
requires.append('pyobjc-framework-Quartz >= 3.0.4')
setup(name='dmgbuild',
version='1.3.0',
description='Mac OS X command line utility to build disk images',
long_description=long_desc,
author='Alastair Houghton',
author_email='alastair@alastairs-place.net',
url='http://alastairs-place.net/projects/dmgbuild',
license='MIT License',
platforms='darwin',
packages=['dmgbuild'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Desktop Environment',
],
package_data = {
'dmgbuild': ['resources/*']
},
scripts=['scripts/dmgbuild'],
install_requires=requires,
provides=['dmgbuild']
)
|
e9046cd97c1deba9ba70bf60cfdba81eba6e0210
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'imperative': ['toolz >= 0.7.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
Add dep on toolz 0.7.2 for imperative extra
|
Add dep on toolz 0.7.2 for imperative extra
|
Python
|
bsd-3-clause
|
dask/dask,jcrist/dask,cowlicks/dask,dask/dask,ContinuumIO/dask,blaze/dask,mraspaud/dask,gameduell/dask,cpcloud/dask,mikegraham/dask,chrisbarber/dask,jakirkham/dask,blaze/dask,mraspaud/dask,jcrist/dask,ContinuumIO/dask,mrocklin/dask,jakirkham/dask,mrocklin/dask
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
Add dep on toolz 0.7.2 for imperative extra
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'imperative': ['toolz >= 0.7.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
<commit_before>#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
<commit_msg>Add dep on toolz 0.7.2 for imperative extra<commit_after>
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'imperative': ['toolz >= 0.7.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
Add dep on toolz 0.7.2 for imperative extra#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'imperative': ['toolz >= 0.7.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
<commit_before>#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
<commit_msg>Add dep on toolz 0.7.2 for imperative extra<commit_after>#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import dask
extras_require = {
'array': ['numpy', 'toolz >= 0.7.2'],
'bag': ['cloudpickle', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'dataframe': ['numpy', 'pandas >= 0.16.0', 'toolz >= 0.7.2', 'partd >= 0.3.2'],
'imperative': ['toolz >= 0.7.2'],
}
extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
setup(name='dask',
version=dask.__version__,
description='Minimal task scheduling abstraction',
url='http://github.com/dask/dask/',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='task-scheduling parallelism',
packages=['dask', 'dask.array', 'dask.bag', 'dask.store',
'dask.dataframe', 'dask.diagnostics'],
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
extras_require=extras_require,
zip_safe=False)
|
1dbaae42645a4b5873a603f3ed9ce8c08a1467ec
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
Add back download url :/
|
Add back download url :/
|
Python
|
mit
|
VonStruddle/PyHunter
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
Add back download url :/
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
<commit_msg>Add back download url :/<commit_after>
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
Add back download url :/from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
<commit_msg>Add back download url :/<commit_after>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.1',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
2433f8f3249b46e39a3dc9f036720eb80702df6e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.1'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.2'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
Increase version 3.1.1 -> 3.1.2
|
PRJ: Increase version 3.1.1 -> 3.1.2
|
Python
|
mit
|
SiLab-Bonn/pixel_clusterizer
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.1'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
PRJ: Increase version 3.1.1 -> 3.1.2
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.2'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.1'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
<commit_msg>PRJ: Increase version 3.1.1 -> 3.1.2<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.2'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.1'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
PRJ: Increase version 3.1.1 -> 3.1.2#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.2'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.1'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
<commit_msg>PRJ: Increase version 3.1.1 -> 3.1.2<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.2'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python. The clustering happens with numba on numpy arrays to increase the speed.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
platforms='any'
)
|
f1254e6116b22923ab6f988c6cf5dca91623c678
|
setup.py
|
setup.py
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
Allow users to run Step Stool as either `step-stool` or `stepstool`.
|
Allow users to run Step Stool as either `step-stool` or `stepstool`.
|
Python
|
mit
|
chriskrycho/step-stool,chriskrycho/step-stool
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
Allow users to run Step Stool as either `step-stool` or `stepstool`.
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
<commit_before>from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
<commit_msg>Allow users to run Step Stool as either `step-stool` or `stepstool`.<commit_after>
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
Allow users to run Step Stool as either `step-stool` or `stepstool`.from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
<commit_before>from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
<commit_msg>Allow users to run Step Stool as either `step-stool` or `stepstool`.<commit_after>from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='chris@step-stool.com',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
ae97d45456854c2e584840bcefe598f889dcb737
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages=find_packages(),
install_requires=[
'invoke'
],
setup_requires=['pytest-runner'],
extras_require={
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages = find_packages(),
install_requires = [
'invoke'
],
setup_requires = ['pytest-runner'],
extras_require = {
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
Add spaces around equal signs.
|
Add spaces around equal signs.
|
Python
|
apache-2.0
|
rqelibari/sirup
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages=find_packages(),
install_requires=[
'invoke'
],
setup_requires=['pytest-runner'],
extras_require={
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
Add spaces around equal signs.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages = find_packages(),
install_requires = [
'invoke'
],
setup_requires = ['pytest-runner'],
extras_require = {
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages=find_packages(),
install_requires=[
'invoke'
],
setup_requires=['pytest-runner'],
extras_require={
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
<commit_msg>Add spaces around equal signs.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages = find_packages(),
install_requires = [
'invoke'
],
setup_requires = ['pytest-runner'],
extras_require = {
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages=find_packages(),
install_requires=[
'invoke'
],
setup_requires=['pytest-runner'],
extras_require={
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
Add spaces around equal signs.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages = find_packages(),
install_requires = [
'invoke'
],
setup_requires = ['pytest-runner'],
extras_require = {
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages=find_packages(),
install_requires=[
'invoke'
],
setup_requires=['pytest-runner'],
extras_require={
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
<commit_msg>Add spaces around equal signs.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup.py
Part of sirup project
(c) 2017 Copyright Rezart Qelibari <rqelibari@users.noreply.github.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Support setuptools only, distutils has a divergent and more annoying API
from setuptools import setup, find_packages
setup(
packages = find_packages(),
install_requires = [
'invoke'
],
setup_requires = ['pytest-runner'],
extras_require = {
'dev': [
'flake8',
'Sphinx',
'pytest'
]
}
)
|
395db381f6ad38465666efd2c56a261bcfdf38b9
|
common/djangoapps/track/backends/logger.py
|
common/djangoapps/track/backends/logger.py
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
raise
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
Raise UnicodeDecodeError exception after logging the exception
|
Raise UnicodeDecodeError exception after logging the exception
|
Python
|
agpl-3.0
|
wwj718/edx-platform,appsembler/edx-platform,zhenzhai/edx-platform,lduarte1991/edx-platform,raccoongang/edx-platform,kmoocdev2/edx-platform,edx/edx-platform,msegado/edx-platform,alu042/edx-platform,amir-qayyum-khan/edx-platform,Stanford-Online/edx-platform,tanmaykm/edx-platform,defance/edx-platform,kmoocdev2/edx-platform,longmen21/edx-platform,ovnicraft/edx-platform,appsembler/edx-platform,cognitiveclass/edx-platform,MakeHer/edx-platform,a-parhom/edx-platform,franosincic/edx-platform,mbareta/edx-platform-ft,eduNEXT/edx-platform,franosincic/edx-platform,analyseuc3m/ANALYSE-v1,fintech-circle/edx-platform,eduNEXT/edunext-platform,prarthitm/edxplatform,Ayub-Khan/edx-platform,Endika/edx-platform,solashirai/edx-platform,angelapper/edx-platform,CredoReference/edx-platform,ZLLab-Mooc/edx-platform,longmen21/edx-platform,raccoongang/edx-platform,msegado/edx-platform,Edraak/circleci-edx-platform,cognitiveclass/edx-platform,proversity-org/edx-platform,shabab12/edx-platform,mitocw/edx-platform,CredoReference/edx-platform,EDUlib/edx-platform,naresh21/synergetics-edx-platform,doganov/edx-platform,ESOedX/edx-platform,ZLLab-Mooc/edx-platform,nttks/edx-platform,CredoReference/edx-platform,prarthitm/edxplatform,wwj718/edx-platform,deepsrijit1105/edx-platform,zhenzhai/edx-platform,simbs/edx-platform,jolyonb/edx-platform,Endika/edx-platform,simbs/edx-platform,antoviaque/edx-platform,wwj718/edx-platform,tanmaykm/edx-platform,wwj718/edx-platform,procangroup/edx-platform,marcore/edx-platform,pabloborrego93/edx-platform,bigdatauniversity/edx-platform,teltek/edx-platform,teltek/edx-platform,ampax/edx-platform,Edraak/circleci-edx-platform,jjmiranda/edx-platform,pepeportela/edx-platform,waheedahmed/edx-platform,IndonesiaX/edx-platform,ampax/edx-platform,devs1991/test_edx_docmode,UOMx/edx-platform,ampax/edx-platform,itsjeyd/edx-platform,Lektorium-LLC/edx-platform,deepsrijit1105/edx-platform,zhenzhai/edx-platform,bigdatauniversity/edx-platform,solashirai/edx-platform,doganov/edx-platform,mitocw/edx-platform,nttks/edx-platform,simbs/edx-platform,devs1991/test_edx_docmode,louyihua/edx-platform,eduNEXT/edunext-platform,jolyonb/edx-platform,Edraak/edx-platform,cecep-edu/edx-platform,ovnicraft/edx-platform,CredoReference/edx-platform,fintech-circle/edx-platform,pomegranited/edx-platform,MakeHer/edx-platform,solashirai/edx-platform,halvertoluke/edx-platform,msegado/edx-platform,a-parhom/edx-platform,nttks/edx-platform,Edraak/edraak-platform,EDUlib/edx-platform,philanthropy-u/edx-platform,edx-solutions/edx-platform,gymnasium/edx-platform,cecep-edu/edx-platform,appsembler/edx-platform,analyseuc3m/ANALYSE-v1,ovnicraft/edx-platform,arbrandes/edx-platform,Lektorium-LLC/edx-platform,antoviaque/edx-platform,defance/edx-platform,analyseuc3m/ANALYSE-v1,shabab12/edx-platform,alu042/edx-platform,IndonesiaX/edx-platform,ESOedX/edx-platform,mbareta/edx-platform-ft,jjmiranda/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,EDUlib/edx-platform,pepeportela/edx-platform,fintech-circle/edx-platform,nttks/edx-platform,hastexo/edx-platform,ZLLab-Mooc/edx-platform,marcore/edx-platform,alu042/edx-platform,caesar2164/edx-platform,pepeportela/edx-platform,ahmedaljazzar/edx-platform,franosincic/edx-platform,ahmedaljazzar/edx-platform,shabab12/edx-platform,procangroup/edx-platform,raccoongang/edx-platform,wwj718/edx-platform,philanthropy-u/edx-platform,marcore/edx-platform,teltek/edx-platform,Livit/Livit.Learn.EdX,angelapper/edx-platform,pabloborrego93/edx-platform,cpennington/edx-platform,jzoldak/edx-platform,solashirai/edx-platform,longmen21/edx-platform,Edraak/edraak-platform,synergeticsedx/deployment-wipro,gymnasium/edx-platform,JioEducation/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,caesar2164/edx-platform,Lektorium-LLC/edx-platform,kmoocdev2/edx-platform,Ayub-Khan/edx-platform,appsembler/edx-platform,CourseTalk/edx-platform,kursitet/edx-platform,a-parhom/edx-platform,cecep-edu/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,TeachAtTUM/edx-platform,waheedahmed/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,hastexo/edx-platform,RPI-OPENEDX/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,pabloborrego93/edx-platform,stvstnfrd/edx-platform,shurihell/testasia,UOMx/edx-platform,bigdatauniversity/edx-platform,longmen21/edx-platform,proversity-org/edx-platform,MakeHer/edx-platform,BehavioralInsightsTeam/edx-platform,hastexo/edx-platform,Ayub-Khan/edx-platform,RPI-OPENEDX/edx-platform,procangroup/edx-platform,BehavioralInsightsTeam/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,itsjeyd/edx-platform,nttks/edx-platform,tanmaykm/edx-platform,mitocw/edx-platform,itsjeyd/edx-platform,naresh21/synergetics-edx-platform,franosincic/edx-platform,itsjeyd/edx-platform,proversity-org/edx-platform,caesar2164/edx-platform,synergeticsedx/deployment-wipro,edx-solutions/edx-platform,cpennington/edx-platform,simbs/edx-platform,pomegranited/edx-platform,doganov/edx-platform,miptliot/edx-platform,pomegranited/edx-platform,MakeHer/edx-platform,defance/edx-platform,kursitet/edx-platform,prarthitm/edxplatform,bigdatauniversity/edx-platform,miptliot/edx-platform,pabloborrego93/edx-platform,longmen21/edx-platform,ZLLab-Mooc/edx-platform,RPI-OPENEDX/edx-platform,miptliot/edx-platform,hastexo/edx-platform,waheedahmed/edx-platform,Edraak/circleci-edx-platform,cpennington/edx-platform,eduNEXT/edx-platform,shurihell/testasia,teltek/edx-platform,Ayub-Khan/edx-platform,gymnasium/edx-platform,cognitiveclass/edx-platform,ESOedX/edx-platform,tanmaykm/edx-platform,halvertoluke/edx-platform,kursitet/edx-platform,halvertoluke/edx-platform,pomegranited/edx-platform,cognitiveclass/edx-platform,zhenzhai/edx-platform,synergeticsedx/deployment-wipro,JioEducation/edx-platform,Livit/Livit.Learn.EdX,Lektorium-LLC/edx-platform,shurihell/testasia,BehavioralInsightsTeam/edx-platform,ovnicraft/edx-platform,franosincic/edx-platform,bigdatauniversity/edx-platform,Edraak/edx-platform,devs1991/test_edx_docmode,amir-qayyum-khan/edx-platform,angelapper/edx-platform,jzoldak/edx-platform,kursitet/edx-platform,stvstnfrd/edx-platform,romain-li/edx-platform,10clouds/edx-platform,naresh21/synergetics-edx-platform,raccoongang/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,Edraak/circleci-edx-platform,UOMx/edx-platform,Edraak/edx-platform,mbareta/edx-platform-ft,romain-li/edx-platform,10clouds/edx-platform,TeachAtTUM/edx-platform,JioEducation/edx-platform,cpennington/edx-platform,romain-li/edx-platform,louyihua/edx-platform,shurihell/testasia,halvertoluke/edx-platform,eduNEXT/edunext-platform,a-parhom/edx-platform,ahmedaljazzar/edx-platform,edx/edx-platform,deepsrijit1105/edx-platform,Edraak/circleci-edx-platform,edx/edx-platform,devs1991/test_edx_docmode,antoviaque/edx-platform,IndonesiaX/edx-platform,stvstnfrd/edx-platform,alu042/edx-platform,Ayub-Khan/edx-platform,mitocw/edx-platform,TeachAtTUM/edx-platform,gymnasium/edx-platform,amir-qayyum-khan/edx-platform,devs1991/test_edx_docmode,zhenzhai/edx-platform,philanthropy-u/edx-platform,CourseTalk/edx-platform,romain-li/edx-platform,ahmedaljazzar/edx-platform,Livit/Livit.Learn.EdX,cognitiveclass/edx-platform,ampax/edx-platform,IndonesiaX/edx-platform,Edraak/edraak-platform,pepeportela/edx-platform,louyihua/edx-platform,shabab12/edx-platform,devs1991/test_edx_docmode,TeachAtTUM/edx-platform,kursitet/edx-platform,shurihell/testasia,procangroup/edx-platform,deepsrijit1105/edx-platform,IndonesiaX/edx-platform,solashirai/edx-platform,louyihua/edx-platform,waheedahmed/edx-platform,chrisndodge/edx-platform,JioEducation/edx-platform,ovnicraft/edx-platform,gsehub/edx-platform,romain-li/edx-platform,pomegranited/edx-platform,lduarte1991/edx-platform,jolyonb/edx-platform,fintech-circle/edx-platform,EDUlib/edx-platform,Stanford-Online/edx-platform,amir-qayyum-khan/edx-platform,jjmiranda/edx-platform,CourseTalk/edx-platform,proversity-org/edx-platform,gsehub/edx-platform,RPI-OPENEDX/edx-platform,edx/edx-platform,10clouds/edx-platform,chrisndodge/edx-platform,simbs/edx-platform,edx-solutions/edx-platform,Stanford-Online/edx-platform,MakeHer/edx-platform,UOMx/edx-platform,devs1991/test_edx_docmode,chrisndodge/edx-platform,Endika/edx-platform,halvertoluke/edx-platform,Edraak/edraak-platform,arbrandes/edx-platform,ESOedX/edx-platform,analyseuc3m/ANALYSE-v1,naresh21/synergetics-edx-platform,jolyonb/edx-platform,Edraak/edx-platform,lduarte1991/edx-platform,prarthitm/edxplatform,Stanford-Online/edx-platform,cecep-edu/edx-platform,chrisndodge/edx-platform,waheedahmed/edx-platform,msegado/edx-platform,miptliot/edx-platform,jzoldak/edx-platform,RPI-OPENEDX/edx-platform,doganov/edx-platform,jjmiranda/edx-platform,arbrandes/edx-platform,doganov/edx-platform,Endika/edx-platform,jzoldak/edx-platform,stvstnfrd/edx-platform,10clouds/edx-platform,lduarte1991/edx-platform,ZLLab-Mooc/edx-platform,defance/edx-platform,eduNEXT/edx-platform,Livit/Livit.Learn.EdX,cecep-edu/edx-platform,mbareta/edx-platform-ft,marcore/edx-platform,synergeticsedx/deployment-wipro,CourseTalk/edx-platform
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
Raise UnicodeDecodeError exception after logging the exception
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
raise
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
<commit_before>"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
<commit_msg>Raise UnicodeDecodeError exception after logging the exception<commit_after>
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
raise
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
Raise UnicodeDecodeError exception after logging the exception"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
raise
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
<commit_before>"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
<commit_msg>Raise UnicodeDecodeError exception after logging the exception<commit_after>"""Event tracker backend that saves events to a python logger."""
from __future__ import absolute_import
import logging
import json
from django.conf import settings
from track.backends import BaseBackend
from track.utils import DateTimeJSONEncoder
log = logging.getLogger('track.backends.logger')
application_log = logging.getLogger('track.backends.application_log') # pylint: disable=invalid-name
class LoggerBackend(BaseBackend):
"""Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, name, **kwargs):
"""Event tracker backend that uses a python logger.
:Parameters:
- `name`: identifier of the logger, which should have
been configured using the default python mechanisms.
"""
super(LoggerBackend, self).__init__(**kwargs)
self.event_logger = logging.getLogger(name)
def send(self, event):
try:
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
except UnicodeDecodeError:
application_log.exception(
"UnicodeDecodeError Event_type: %r, Event_source: %r, Page: %r, Referer: %r",
event.get('event_type'), event.get('event_source'), event.get('page'), event.get('referer')
)
raise
# TODO: remove trucation of the serialized event, either at a
# higher level during the emittion of the event, or by
# providing warnings when the events exceed certain size.
event_str = event_str[:settings.TRACK_MAX_EVENT]
self.event_logger.info(event_str)
|
c6917a2f439b99078e67310230f1d0cfa0de8a7b
|
tests/builder_tests.py
|
tests/builder_tests.py
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
|
Add test helper for creating users
|
Add test helper for creating users
|
Python
|
mit
|
numberoverzero/jsonquery
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
Add test helper for creating users
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
|
<commit_before>import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
<commit_msg>Add test helper for creating users<commit_after>
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
|
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
Add test helper for creating usersimport ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
|
<commit_before>import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
<commit_msg>Add test helper for creating users<commit_after>import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
|
0b741c89ea19759f25526256ee039707cb423cef
|
aldryn_faq/tests/test_menu.py
|
aldryn_faq/tests/test_menu.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from django.utils.translation import (
get_language_from_request,
)
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and is shown
# in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name]
)
# Test that the DE version has 2 categories and that they are shown in
# German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'de')
category2 = self.reload(self.category2, 'de')
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[category1.name, category2.name]
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and its
# question1, and is shown in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
question1 = self.reload(self.question1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name, question1.title]
)
# Test that the DE version has 2 categories and their questions that
# they are shown in German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[self.category1.name, self.category2.name, self.question1.title,
self.question2.title]
)
|
Fix tests to now include the questions, which are now in the menu
|
Fix tests to now include the questions, which are now in the menu
|
Python
|
bsd-3-clause
|
czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from django.utils.translation import (
get_language_from_request,
)
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and is shown
# in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name]
)
# Test that the DE version has 2 categories and that they are shown in
# German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'de')
category2 = self.reload(self.category2, 'de')
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[category1.name, category2.name]
)
Fix tests to now include the questions, which are now in the menu
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and its
# question1, and is shown in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
question1 = self.reload(self.question1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name, question1.title]
)
# Test that the DE version has 2 categories and their questions that
# they are shown in German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[self.category1.name, self.category2.name, self.question1.title,
self.question2.title]
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from django.utils.translation import (
get_language_from_request,
)
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and is shown
# in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name]
)
# Test that the DE version has 2 categories and that they are shown in
# German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'de')
category2 = self.reload(self.category2, 'de')
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[category1.name, category2.name]
)
<commit_msg>Fix tests to now include the questions, which are now in the menu<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and its
# question1, and is shown in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
question1 = self.reload(self.question1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name, question1.title]
)
# Test that the DE version has 2 categories and their questions that
# they are shown in German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[self.category1.name, self.category2.name, self.question1.title,
self.question2.title]
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from django.utils.translation import (
get_language_from_request,
)
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and is shown
# in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name]
)
# Test that the DE version has 2 categories and that they are shown in
# German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'de')
category2 = self.reload(self.category2, 'de')
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[category1.name, category2.name]
)
Fix tests to now include the questions, which are now in the menu# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and its
# question1, and is shown in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
question1 = self.reload(self.question1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name, question1.title]
)
# Test that the DE version has 2 categories and their questions that
# they are shown in German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[self.category1.name, self.category2.name, self.question1.title,
self.question2.title]
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from django.utils.translation import (
get_language_from_request,
)
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and is shown
# in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name]
)
# Test that the DE version has 2 categories and that they are shown in
# German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'de')
category2 = self.reload(self.category2, 'de')
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[category1.name, category2.name]
)
<commit_msg>Fix tests to now include the questions, which are now in the menu<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_faq.menu import FaqCategoryMenu
from .test_base import AldrynFaqTest, CMSRequestBasedTest
class TestMenu(AldrynFaqTest, CMSRequestBasedTest):
def test_get_nodes(self):
# Test that the EN version of the menu has only category1 and its
# question1, and is shown in English.
request = self.get_page_request(None, self.user, '/en/')
menu = FaqCategoryMenu()
category1 = self.reload(self.category1, 'en')
question1 = self.reload(self.question1, 'en')
self.assertEqualItems(
[menuitem.title for menuitem in menu.get_nodes(request)],
[category1.name, question1.title]
)
# Test that the DE version has 2 categories and their questions that
# they are shown in German.
request = self.get_page_request(None, self.user, '/de/')
menu = FaqCategoryMenu()
nodes = menu.get_nodes(request)
self.assertEqualItems(
[menuitem.title for menuitem in nodes],
[self.category1.name, self.category2.name, self.question1.title,
self.question2.title]
)
|
57dfe42d957214d23e1ad28595db5af5adf1a5d6
|
Orange/regression/__init__.py
|
Orange/regression/__init__.py
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
from Orange.classification.simple_tree import *
|
Include simple tree in regression package
|
SimpleTree: Include simple tree in regression package
|
Python
|
bsd-2-clause
|
qPCR4vir/orange3,marinkaz/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,kwikadi/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,marinkaz/orange3,marinkaz/orange3
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
SimpleTree: Include simple tree in regression package
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
from Orange.classification.simple_tree import *
|
<commit_before>from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
<commit_msg>SimpleTree: Include simple tree in regression package<commit_after>
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
from Orange.classification.simple_tree import *
|
from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
SimpleTree: Include simple tree in regression packagefrom .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
from Orange.classification.simple_tree import *
|
<commit_before>from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
<commit_msg>SimpleTree: Include simple tree in regression package<commit_after>from .base_regression import (ModelRegression as Model,
LearnerRegression as Learner,
SklModelRegression as SklModel,
SklLearnerRegression as SklLearner)
from .linear import *
from .mean import *
from .knn import *
from .simple_random_forest import *
from .svm import *
from .random_forest import *
from .tree import *
from Orange.classification.simple_tree import *
|
518df76dcc14895f4555451194f64a98ccc814ef
|
pymco/utils.py
|
pymco/utils.py
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
Use importlib.import_module instead of __import__
|
Use importlib.import_module instead of __import__
|
Python
|
bsd-3-clause
|
rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
Use importlib.import_module instead of __import__
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
<commit_before>"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
<commit_msg>Use importlib.import_module instead of __import__<commit_after>
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
Use importlib.import_module instead of __import__"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
<commit_before>"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
<commit_msg>Use importlib.import_module instead of __import__<commit_after>"""
:py:mod:`pymco.utils`
---------------------
python-mcollective utils.
"""
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
4fe72ff427290e845c0259cd1aadf21dd29b9872
|
kivy/tests/test_video.py
|
kivy/tests/test_video.py
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname
here = dirname(__file__)
source = join(here, "..", "..", "examples", "widgets", "softboy.avi")
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname, abspath
here = dirname(__file__)
source = abspath(join(
here, "..", "..", "examples", "widgets", "softboy.mpg"))
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
Fix path and avi -> mpg.
|
Fix path and avi -> mpg.
|
Python
|
mit
|
el-ethan/kivy,angryrancor/kivy,janssen/kivy,rafalo1333/kivy,cbenhagen/kivy,inclement/kivy,Farkal/kivy,jffernandez/kivy,manthansharma/kivy,youprofit/kivy,darkopevec/kivy,jegger/kivy,xiaoyanit/kivy,bob-the-hamster/kivy,Cheaterman/kivy,manthansharma/kivy,LogicalDash/kivy,aron-bordin/kivy,vipulroxx/kivy,andnovar/kivy,iamutkarshtiwari/kivy,habibmasuro/kivy,manthansharma/kivy,edubrunaldi/kivy,mSenyor/kivy,arlowhite/kivy,thezawad/kivy,KeyWeeUsr/kivy,manthansharma/kivy,darkopevec/kivy,Farkal/kivy,denys-duchier/kivy,CuriousLearner/kivy,gonzafirewall/kivy,jkankiewicz/kivy,Farkal/kivy,MiyamotoAkira/kivy,vitorio/kivy,xiaoyanit/kivy,denys-duchier/kivy,KeyWeeUsr/kivy,gonzafirewall/kivy,manashmndl/kivy,darkopevec/kivy,xiaoyanit/kivy,bliz937/kivy,kivy/kivy,bionoid/kivy,bliz937/kivy,yoelk/kivy,arcticshores/kivy,vitorio/kivy,jehutting/kivy,ernstp/kivy,jkankiewicz/kivy,akshayaurora/kivy,yoelk/kivy,Cheaterman/kivy,dirkjot/kivy,dirkjot/kivy,bionoid/kivy,cbenhagen/kivy,rafalo1333/kivy,youprofit/kivy,Ramalus/kivy,jegger/kivy,janssen/kivy,jkankiewicz/kivy,jegger/kivy,MiyamotoAkira/kivy,CuriousLearner/kivy,habibmasuro/kivy,Cheaterman/kivy,arcticshores/kivy,rnixx/kivy,youprofit/kivy,angryrancor/kivy,inclement/kivy,Shyam10/kivy,ernstp/kivy,xpndlabs/kivy,xpndlabs/kivy,angryrancor/kivy,denys-duchier/kivy,el-ethan/kivy,mSenyor/kivy,akshayaurora/kivy,arcticshores/kivy,Ramalus/kivy,autosportlabs/kivy,viralpandey/kivy,thezawad/kivy,jegger/kivy,iamutkarshtiwari/kivy,bhargav2408/kivy,ernstp/kivy,kivy/kivy,matham/kivy,Ramalus/kivy,janssen/kivy,viralpandey/kivy,rnixx/kivy,bhargav2408/kivy,angryrancor/kivy,autosportlabs/kivy,VinGarcia/kivy,vitorio/kivy,MiyamotoAkira/kivy,LogicalDash/kivy,Shyam10/kivy,jffernandez/kivy,Farkal/kivy,dirkjot/kivy,yoelk/kivy,janssen/kivy,bionoid/kivy,akshayaurora/kivy,vipulroxx/kivy,kivy/kivy,bob-the-hamster/kivy,rnixx/kivy,gonzafirewall/kivy,jffernandez/kivy,aron-bordin/kivy,Shyam10/kivy,matham/kivy,adamkh/kivy,andnovar/kivy,edubrunaldi/kivy,arlowhite/kivy,vipulroxx/kivy,viralpandey/kivy,iamutkarshtiwari/kivy,aron-bordin/kivy,rafalo1333/kivy,manashmndl/kivy,kived/kivy,xpndlabs/kivy,adamkh/kivy,kived/kivy,jffernandez/kivy,bionoid/kivy,bob-the-hamster/kivy,tony/kivy,dirkjot/kivy,el-ethan/kivy,Cheaterman/kivy,VinGarcia/kivy,Shyam10/kivy,autosportlabs/kivy,gonzafirewall/kivy,LogicalDash/kivy,bliz937/kivy,MiyamotoAkira/kivy,jehutting/kivy,arlowhite/kivy,CuriousLearner/kivy,aron-bordin/kivy,cbenhagen/kivy,KeyWeeUsr/kivy,edubrunaldi/kivy,arcticshores/kivy,denys-duchier/kivy,jehutting/kivy,matham/kivy,jkankiewicz/kivy,VinGarcia/kivy,mSenyor/kivy,tony/kivy,adamkh/kivy,adamkh/kivy,inclement/kivy,bob-the-hamster/kivy,darkopevec/kivy,ernstp/kivy,LogicalDash/kivy,habibmasuro/kivy,bhargav2408/kivy,tony/kivy,andnovar/kivy,yoelk/kivy,KeyWeeUsr/kivy,matham/kivy,thezawad/kivy,vipulroxx/kivy,kived/kivy,manashmndl/kivy
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname
here = dirname(__file__)
source = join(here, "..", "..", "examples", "widgets", "softboy.avi")
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
Fix path and avi -> mpg.
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname, abspath
here = dirname(__file__)
source = abspath(join(
here, "..", "..", "examples", "widgets", "softboy.mpg"))
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
<commit_before>
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname
here = dirname(__file__)
source = join(here, "..", "..", "examples", "widgets", "softboy.avi")
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
<commit_msg>Fix path and avi -> mpg.<commit_after>
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname, abspath
here = dirname(__file__)
source = abspath(join(
here, "..", "..", "examples", "widgets", "softboy.mpg"))
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname
here = dirname(__file__)
source = join(here, "..", "..", "examples", "widgets", "softboy.avi")
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
Fix path and avi -> mpg.
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname, abspath
here = dirname(__file__)
source = abspath(join(
here, "..", "..", "examples", "widgets", "softboy.mpg"))
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
<commit_before>
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname
here = dirname(__file__)
source = join(here, "..", "..", "examples", "widgets", "softboy.avi")
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
<commit_msg>Fix path and avi -> mpg.<commit_after>
import unittest
class AnimationTestCase(unittest.TestCase):
def test_video_unload(self):
# fix issue https://github.com/kivy/kivy/issues/2275
# AttributeError: 'NoneType' object has no attribute 'texture'
from kivy.uix.video import Video
from kivy.clock import Clock
from kivy.base import runTouchApp, stopTouchApp
from os.path import join, dirname, abspath
here = dirname(__file__)
source = abspath(join(
here, "..", "..", "examples", "widgets", "softboy.mpg"))
video = Video(source=source, play=True)
Clock.schedule_once(lambda x: stopTouchApp(), 1)
def unload_video(video, position):
if position > 0.01:
video.unload()
Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
video.bind(position=unload_video)
runTouchApp(video)
|
0728a5b64ec9a871267d3b0b6ea6c3390b7a8e1f
|
clowder/clowder/cli/status_controller.py
|
clowder/clowder/cli/status_controller.py
|
from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
|
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.commands.util import run_group_command
from clowder.util.decorators import network_connection_required
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
if self.app.pargs.fetch:
_fetch_projects(self.clowder_repo, self.clowder)
else:
self.clowder_repo.print_status()
padding = len(max(self.clowder.get_all_project_paths(), key=len))
for group in self.clowder.groups:
print(fmt.group_name(group.name))
for project in group.projects:
print(project.status(padding=padding))
@network_connection_required
def _fetch_projects(clowder_repo, clowder):
"""fetch all projects
:param ClowderRepo clowder_repo: ClowderRepo instance
:param ClowderController clowder: ClowderController instance
"""
clowder_repo.print_status(fetch=True)
print(' - Fetch upstream changes for projects\n')
for group in clowder.groups:
run_group_command(group, [], 'fetch_all')
|
Add `clowder status` logic to Cement controller
|
Add `clowder status` logic to Cement controller
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder status` logic to Cement controller
|
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.commands.util import run_group_command
from clowder.util.decorators import network_connection_required
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
if self.app.pargs.fetch:
_fetch_projects(self.clowder_repo, self.clowder)
else:
self.clowder_repo.print_status()
padding = len(max(self.clowder.get_all_project_paths(), key=len))
for group in self.clowder.groups:
print(fmt.group_name(group.name))
for project in group.projects:
print(project.status(padding=padding))
@network_connection_required
def _fetch_projects(clowder_repo, clowder):
"""fetch all projects
:param ClowderRepo clowder_repo: ClowderRepo instance
:param ClowderController clowder: ClowderController instance
"""
clowder_repo.print_status(fetch=True)
print(' - Fetch upstream changes for projects\n')
for group in clowder.groups:
run_group_command(group, [], 'fetch_all')
|
<commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder status` logic to Cement controller<commit_after>
|
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.commands.util import run_group_command
from clowder.util.decorators import network_connection_required
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
if self.app.pargs.fetch:
_fetch_projects(self.clowder_repo, self.clowder)
else:
self.clowder_repo.print_status()
padding = len(max(self.clowder.get_all_project_paths(), key=len))
for group in self.clowder.groups:
print(fmt.group_name(group.name))
for project in group.projects:
print(project.status(padding=padding))
@network_connection_required
def _fetch_projects(clowder_repo, clowder):
"""fetch all projects
:param ClowderRepo clowder_repo: ClowderRepo instance
:param ClowderController clowder: ClowderController instance
"""
clowder_repo.print_status(fetch=True)
print(' - Fetch upstream changes for projects\n')
for group in clowder.groups:
run_group_command(group, [], 'fetch_all')
|
from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder status` logic to Cement controllerfrom cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.commands.util import run_group_command
from clowder.util.decorators import network_connection_required
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
if self.app.pargs.fetch:
_fetch_projects(self.clowder_repo, self.clowder)
else:
self.clowder_repo.print_status()
padding = len(max(self.clowder.get_all_project_paths(), key=len))
for group in self.clowder.groups:
print(fmt.group_name(group.name))
for project in group.projects:
print(project.status(padding=padding))
@network_connection_required
def _fetch_projects(clowder_repo, clowder):
"""fetch all projects
:param ClowderRepo clowder_repo: ClowderRepo instance
:param ClowderController clowder: ClowderController instance
"""
clowder_repo.print_status(fetch=True)
print(' - Fetch upstream changes for projects\n')
for group in clowder.groups:
run_group_command(group, [], 'fetch_all')
|
<commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder status` logic to Cement controller<commit_after>from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.commands.util import run_group_command
from clowder.util.decorators import network_connection_required
class StatusController(AbstractBaseController):
class Meta:
label = 'status'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print project status'
arguments = AbstractBaseController.Meta.arguments + [
(['--fetch', '-f'], dict(action='store_true', help='fetch projects before printing status'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
if self.app.pargs.fetch:
_fetch_projects(self.clowder_repo, self.clowder)
else:
self.clowder_repo.print_status()
padding = len(max(self.clowder.get_all_project_paths(), key=len))
for group in self.clowder.groups:
print(fmt.group_name(group.name))
for project in group.projects:
print(project.status(padding=padding))
@network_connection_required
def _fetch_projects(clowder_repo, clowder):
"""fetch all projects
:param ClowderRepo clowder_repo: ClowderRepo instance
:param ClowderController clowder: ClowderController instance
"""
clowder_repo.print_status(fetch=True)
print(' - Fetch upstream changes for projects\n')
for group in clowder.groups:
run_group_command(group, [], 'fetch_all')
|
e00e821c9984038c15d9cb9a6db3d4e13a770cb6
|
src/cclib/bridge/cclib2openbabel.py
|
src/cclib/bridge/cclib2openbabel.py
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = atomnos[i]
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = int(atomnos[i])
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Convert atomno to int in case it is a different numpy dtype.
|
Convert atomno to int in case it is a different numpy dtype.
|
Python
|
bsd-3-clause
|
gaursagar/cclib,andersx/cclib,ghutchis/cclib,cclib/cclib,Clyde-fare/cclib,berquist/cclib,jchodera/cclib,Schamnad/cclib,ATenderholt/cclib,langner/cclib,jchodera/cclib,ghutchis/cclib,langner/cclib,cclib/cclib,berquist/cclib,Clyde-fare/cclib,ben-albrecht/cclib,andersx/cclib,gaursagar/cclib,ATenderholt/cclib,ben-albrecht/cclib,berquist/cclib,langner/cclib,cclib/cclib,Schamnad/cclib
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = atomnos[i]
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
Convert atomno to int in case it is a different numpy dtype.
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = int(atomnos[i])
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
<commit_before>"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = atomnos[i]
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
<commit_msg>Convert atomno to int in case it is a different numpy dtype.<commit_after>
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = int(atomnos[i])
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = atomnos[i]
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
Convert atomno to int in case it is a different numpy dtype."""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = int(atomnos[i])
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
<commit_before>"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = atomnos[i]
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
<commit_msg>Convert atomno to int in case it is a different numpy dtype.<commit_after>"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision$"
import openbabel as ob
def makeopenbabel(atomcoords, atomnos, charge=0, mult=1):
"""Create an Open Babel molecule.
>>> import numpy, openbabel
>>> atomnos = numpy.array([1,8,1],"i")
>>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]])
>>> obmol = makeopenbabel(coords, atomnos)
>>> obconversion = openbabel.OBConversion()
>>> formatok = obconversion.SetOutFormat("inchi")
>>> print obconversion.WriteString(obmol).strip()
InChI=1/H2O/h1H2
"""
obmol = ob.OBMol()
for i in range(len(atomnos)):
# Note that list(atomcoords[i]) is not equivalent!!!
coords = atomcoords[i].tolist()
atomno = int(atomnos[i])
obatom = ob.OBAtom()
obatom.SetAtomicNum(atomno)
obatom.SetVector(*coords)
obmol.AddAtom(obatom)
obmol.ConnectTheDots()
obmol.PerceiveBondOrders()
obmol.SetTotalSpinMultiplicity(mult)
obmol.SetTotalCharge(charge)
return obmol
if __name__ == "__main__":
import doctest
doctest.testmod()
|
56c5ba27ecb4324e4c137e9e1595f95ffd58b73a
|
lesscpy/plib/deferred.py
|
lesscpy/plib/deferred.py
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self #return mixins
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return self
# def fmt(self, fills):
# """
# """
# return ''
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return False
|
Add post process stage for stray mixin calls
|
Add post process stage for stray mixin calls
|
Python
|
mit
|
joequery/lesscpy,lesscpy/lesscpy,robotis/lesscpy,fivethreeo/lesscpy
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self #return mixins
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return self
# def fmt(self, fills):
# """
# """
# return ''
Add post process stage for stray mixin calls
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return False
|
<commit_before># -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self #return mixins
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return self
# def fmt(self, fills):
# """
# """
# return ''
<commit_msg>Add post process stage for stray mixin calls<commit_after>
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return False
|
# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self #return mixins
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return self
# def fmt(self, fills):
# """
# """
# return ''
Add post process stage for stray mixin calls# -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return False
|
<commit_before># -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self #return mixins
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return self
# def fmt(self, fills):
# """
# """
# return ''
<commit_msg>Add post process stage for stray mixin calls<commit_after># -*- coding: utf8 -*-
"""
.. module:: lesscpy.plib.deferred
:synopsis: Deferred mixin call.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Jóhann T. Maríusson <jtm@robot.is>
"""
from .node import Node
class Deferred(Node):
def __init__(self, mixin, args):
"""This node represents mixin calls
within the body of other mixins. The calls
to these mixins are deferred until the parent
mixin is called.
args:
mixin (Mixin): Mixin object
args (list): Call arguments
"""
self.mixin = mixin
self.args = args
def parse(self, scope):
""" Parse function.
args:
scope (Scope): Current scope
returns:
mixed
"""
if self.args:
args = [p.parse(scope)
if hasattr(p, 'parse')
else p
for p in self.args]
else:
args = []
if hasattr(self.mixin, 'call'):
return self.mixin.call(scope, args)
mixins = scope.mixins(self.mixin.raw())
if not mixins: return self
for mixin in mixins:
res = mixin.call(scope, args)
if res: return res
return False
|
b5454286a2cfce07f4971b7bc56dd131402f8fe3
|
iati/__init__.py
|
iati/__init__.py
|
"""A top-level namespace package for IATI."""
__import__('pkg_resources').declare_namespace(__name__)
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
|
"""A top-level namespace package for IATI."""
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
__import__('pkg_resources').declare_namespace(__name__)
|
Fix pylint error after iati.core -> iati
|
Fix pylint error after iati.core -> iati
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
"""A top-level namespace package for IATI."""
__import__('pkg_resources').declare_namespace(__name__)
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
Fix pylint error after iati.core -> iati
|
"""A top-level namespace package for IATI."""
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
__import__('pkg_resources').declare_namespace(__name__)
|
<commit_before>"""A top-level namespace package for IATI."""
__import__('pkg_resources').declare_namespace(__name__)
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
<commit_msg>Fix pylint error after iati.core -> iati<commit_after>
|
"""A top-level namespace package for IATI."""
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
__import__('pkg_resources').declare_namespace(__name__)
|
"""A top-level namespace package for IATI."""
__import__('pkg_resources').declare_namespace(__name__)
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
Fix pylint error after iati.core -> iati"""A top-level namespace package for IATI."""
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
__import__('pkg_resources').declare_namespace(__name__)
|
<commit_before>"""A top-level namespace package for IATI."""
__import__('pkg_resources').declare_namespace(__name__)
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
<commit_msg>Fix pylint error after iati.core -> iati<commit_after>"""A top-level namespace package for IATI."""
from .codelists import Code, Codelist # noqa: F401
from .data import Dataset # noqa: F401
from .rulesets import Rule, Ruleset # noqa: F401
from .rulesets import RuleAtLeastOne, RuleDateOrder, RuleDependent, RuleNoMoreThanOne, RuleRegexMatches, RuleRegexNoMatches, RuleStartsWith, RuleSum, RuleUnique # noqa: F401
from .schemas import ActivitySchema, OrganisationSchema # noqa: F401
__import__('pkg_resources').declare_namespace(__name__)
|
08a2220bdacb3e49050a7c223e5c1d8109ae434f
|
ipython-magic.py
|
ipython-magic.py
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
import bohrium
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
try:
import bohrium
have_bohrium = True
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
except ImportError:
warning_shown = False # Warning about missin bohrium has been shown
def execute(__code):
global warning_shown
if not warning_shown:
print("WARNING: Module bohrium could not be imported.\n"
" The magic command '%%bohrium' will have no effect.")
warning_shown = True
exec(__code, globals())
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
Disable the effect of %%bohrium if bohrium cannot be imported.
|
Disable the effect of %%bohrium if bohrium cannot be imported.
The first time the user attempts to use %%bohrium a warning will be
shown. From this point onwards all %%bohrium statements will have no
effect silently.
|
Python
|
apache-2.0
|
bh107/bohrium,madsbk/bohrium,madsbk/bohrium,bh107/bohrium,bh107/bohrium,bh107/bohrium,madsbk/bohrium,madsbk/bohrium
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
import bohrium
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
Disable the effect of %%bohrium if bohrium cannot be imported.
The first time the user attempts to use %%bohrium a warning will be
shown. From this point onwards all %%bohrium statements will have no
effect silently.
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
try:
import bohrium
have_bohrium = True
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
except ImportError:
warning_shown = False # Warning about missin bohrium has been shown
def execute(__code):
global warning_shown
if not warning_shown:
print("WARNING: Module bohrium could not be imported.\n"
" The magic command '%%bohrium' will have no effect.")
warning_shown = True
exec(__code, globals())
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
<commit_before>####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
import bohrium
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
<commit_msg>Disable the effect of %%bohrium if bohrium cannot be imported.
The first time the user attempts to use %%bohrium a warning will be
shown. From this point onwards all %%bohrium statements will have no
effect silently.<commit_after>
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
try:
import bohrium
have_bohrium = True
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
except ImportError:
warning_shown = False # Warning about missin bohrium has been shown
def execute(__code):
global warning_shown
if not warning_shown:
print("WARNING: Module bohrium could not be imported.\n"
" The magic command '%%bohrium' will have no effect.")
warning_shown = True
exec(__code, globals())
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
import bohrium
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
Disable the effect of %%bohrium if bohrium cannot be imported.
The first time the user attempts to use %%bohrium a warning will be
shown. From this point onwards all %%bohrium statements will have no
effect silently.####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
try:
import bohrium
have_bohrium = True
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
except ImportError:
warning_shown = False # Warning about missin bohrium has been shown
def execute(__code):
global warning_shown
if not warning_shown:
print("WARNING: Module bohrium could not be imported.\n"
" The magic command '%%bohrium' will have no effect.")
warning_shown = True
exec(__code, globals())
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
<commit_before>####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
import bohrium
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
<commit_msg>Disable the effect of %%bohrium if bohrium cannot be imported.
The first time the user attempts to use %%bohrium a warning will be
shown. From this point onwards all %%bohrium statements will have no
effect silently.<commit_after>####################################
# This file was created by Bohrium.
# It allows you to run NumPy code (cells) as Bohrium, by using the magic command
# `%%bohrium` in your cells, e.g.:
#
# %%bohrium
# print(numpy)
# print(numpy.arange(10))
####################################
from IPython.core.magic import register_cell_magic
try:
import bohrium
have_bohrium = True
@bohrium.replace_numpy
def execute(__code):
exec(__code, globals(), locals())
__excludes = set(["__excludes", "__code", "np", "bohrium"])
try:
# Python 2.x
for key, value in locals().iteritems():
if key not in __excludes:
globals()[key] = value
except:
# Python 3.x
for key, value in locals().items():
if key not in __excludes:
globals()[key] = value
except ImportError:
warning_shown = False # Warning about missin bohrium has been shown
def execute(__code):
global warning_shown
if not warning_shown:
print("WARNING: Module bohrium could not be imported.\n"
" The magic command '%%bohrium' will have no effect.")
warning_shown = True
exec(__code, globals())
@register_cell_magic
def bohrium(line, cell):
# Code must end with \n
code = cell if cell.endswith("\n") else cell + "\n"
execute(code)
return
|
c2fb467626d586bfb5ddef60fd4d1447515ad161
|
fpsd/evaluation.py
|
fpsd/evaluation.py
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
def plot_feature_importances(feature_names, feature_importances, N=30):
importances = list(zip(feature_names, list(feature_importances)))
importances = pd.DataFrame(importances, columns=["Feature", "Importance"])
importances = importances.set_index("Feature")
# Sort by the absolute value of the importance of the feature
importances["sort"] = abs(importances["Importance"])
importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1)
importances = importances[0:N]
# Show the most important positive feature at the top of the graph
importances = importances.sort(columns="Importance", ascending=True)
with plt.style.context(('ggplot')):
fig, ax = plt.subplots(figsize=(16,12))
ax.tick_params(labelsize=16)
importances.plot(kind="barh", legend=False, ax=ax)
ax.set_frame_on(False)
ax.set_xlabel("Relative importance", fontsize=20)
ax.set_ylabel("Feature name", fontsize=20)
plt.tight_layout()
plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99])
return fig
|
Add function for plotting feature importances
|
Add function for plotting feature importances
|
Python
|
agpl-3.0
|
freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
Add function for plotting feature importances
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
def plot_feature_importances(feature_names, feature_importances, N=30):
importances = list(zip(feature_names, list(feature_importances)))
importances = pd.DataFrame(importances, columns=["Feature", "Importance"])
importances = importances.set_index("Feature")
# Sort by the absolute value of the importance of the feature
importances["sort"] = abs(importances["Importance"])
importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1)
importances = importances[0:N]
# Show the most important positive feature at the top of the graph
importances = importances.sort(columns="Importance", ascending=True)
with plt.style.context(('ggplot')):
fig, ax = plt.subplots(figsize=(16,12))
ax.tick_params(labelsize=16)
importances.plot(kind="barh", legend=False, ax=ax)
ax.set_frame_on(False)
ax.set_xlabel("Relative importance", fontsize=20)
ax.set_ylabel("Feature name", fontsize=20)
plt.tight_layout()
plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99])
return fig
|
<commit_before>def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
<commit_msg>Add function for plotting feature importances<commit_after>
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
def plot_feature_importances(feature_names, feature_importances, N=30):
importances = list(zip(feature_names, list(feature_importances)))
importances = pd.DataFrame(importances, columns=["Feature", "Importance"])
importances = importances.set_index("Feature")
# Sort by the absolute value of the importance of the feature
importances["sort"] = abs(importances["Importance"])
importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1)
importances = importances[0:N]
# Show the most important positive feature at the top of the graph
importances = importances.sort(columns="Importance", ascending=True)
with plt.style.context(('ggplot')):
fig, ax = plt.subplots(figsize=(16,12))
ax.tick_params(labelsize=16)
importances.plot(kind="barh", legend=False, ax=ax)
ax.set_frame_on(False)
ax.set_xlabel("Relative importance", fontsize=20)
ax.set_ylabel("Feature name", fontsize=20)
plt.tight_layout()
plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99])
return fig
|
def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
Add function for plotting feature importancesdef get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
def plot_feature_importances(feature_names, feature_importances, N=30):
importances = list(zip(feature_names, list(feature_importances)))
importances = pd.DataFrame(importances, columns=["Feature", "Importance"])
importances = importances.set_index("Feature")
# Sort by the absolute value of the importance of the feature
importances["sort"] = abs(importances["Importance"])
importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1)
importances = importances[0:N]
# Show the most important positive feature at the top of the graph
importances = importances.sort(columns="Importance", ascending=True)
with plt.style.context(('ggplot')):
fig, ax = plt.subplots(figsize=(16,12))
ax.tick_params(labelsize=16)
importances.plot(kind="barh", legend=False, ax=ax)
ax.set_frame_on(False)
ax.set_xlabel("Relative importance", fontsize=20)
ax.set_ylabel("Feature name", fontsize=20)
plt.tight_layout()
plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99])
return fig
|
<commit_before>def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
<commit_msg>Add function for plotting feature importances<commit_after>def get_feature_importances(model):
try:
return model.feature_importances_
except:
pass
try:
# Must be 1D for feature importance plot
if len(model.coef_) <= 1:
return model.coef_[0]
else:
return model.coef_
except:
pass
return None
def plot_feature_importances(feature_names, feature_importances, N=30):
importances = list(zip(feature_names, list(feature_importances)))
importances = pd.DataFrame(importances, columns=["Feature", "Importance"])
importances = importances.set_index("Feature")
# Sort by the absolute value of the importance of the feature
importances["sort"] = abs(importances["Importance"])
importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1)
importances = importances[0:N]
# Show the most important positive feature at the top of the graph
importances = importances.sort(columns="Importance", ascending=True)
with plt.style.context(('ggplot')):
fig, ax = plt.subplots(figsize=(16,12))
ax.tick_params(labelsize=16)
importances.plot(kind="barh", legend=False, ax=ax)
ax.set_frame_on(False)
ax.set_xlabel("Relative importance", fontsize=20)
ax.set_ylabel("Feature name", fontsize=20)
plt.tight_layout()
plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99])
return fig
|
11529d7ad4d428bdd9f5a58adc1085a665d4f222
|
uconnrcmpy/__init__.py
|
uconnrcmpy/__init__.py
|
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import compare_to_sim
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'compare_to_sim',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
Load the external interface on package import
|
Load the external interface on package import
Set __init__.py so that the useful classes are loaded when the package
is loaded
|
Python
|
bsd-3-clause
|
bryanwweber/UConnRCMPy
|
Load the external interface on package import
Set __init__.py so that the useful classes are loaded when the package
is loaded
|
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import compare_to_sim
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'compare_to_sim',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
<commit_before><commit_msg>Load the external interface on package import
Set __init__.py so that the useful classes are loaded when the package
is loaded<commit_after>
|
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import compare_to_sim
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'compare_to_sim',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
Load the external interface on package import
Set __init__.py so that the useful classes are loaded when the package
is loadedfrom .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import compare_to_sim
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'compare_to_sim',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
<commit_before><commit_msg>Load the external interface on package import
Set __init__.py so that the useful classes are loaded when the package
is loaded<commit_after>from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import compare_to_sim
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'compare_to_sim',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
|
1ac105b7efa3ae4c531fdcc8a626ab47d86e0192
|
tests/test_gen_schema_reading_and_writing.py
|
tests/test_gen_schema_reading_and_writing.py
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
pass
@pytest.mark.skip
class TestGenSchemaReader(object):
"""Tests for Genfile parsing method."""
pass
@pytest.mark.skip
class TestGenSchemaWriter(object):
"""Tests for the Genfile writing method."""
pass
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
import json
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
return [
("tests/resources/genfile1_single_var_single_arg.gen", "tests/resources/configs1.json"),
("tests/resources/genfile2_single_var_multiple_args.gen", "tests/resources/configs2.json"),
("tests/resources/genfile3_multiple_vars.gen", "tests/resources/configs3.json"),
("tests/resources/genfile4_simple_nested_preconstructed_args.gen", "tests/resources/configs4.json"),
("tests/resources/genfile5_simple_nested.gen", "tests/resources/configs5.json"),
("tests/resources/genfile6_multiple_vars_in_nest.gen", "tests/resources/configs6.json"),
("tests/resources/genfile7_incomplete_nested.gen", "tests/resources/configs7.json"),
("tests/resources/genfile8_multiple_nests.gen", "tests/resources/configs8.json"),
("tests/resources/genfile9_multi_nested.gen", "tests/resources/configs9.json"),
("tests/resources/genfile10_multiple_vars_own_nest.gen", "tests/resources/configs10.json"),
("tests/resources/genfile11_commented", "tests/resources/configs11.json")
]
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_read(genfile, config_file):
"""Test the Genfile read method with all examples in test/resources."""
schema = GenSchema.read(genfile)
configs = json.load(open(config_file))
pytest.helpers.compare_configs(configs, schema)
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_write(genfile, config_file):
"""Test the Genfile write method with all examples in test/resources."""
pass
|
Write parameterized test for gen schema read function.
|
Write parameterized test for gen schema read function.
|
Python
|
mit
|
becketta/ctip
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
pass
@pytest.mark.skip
class TestGenSchemaReader(object):
"""Tests for Genfile parsing method."""
pass
@pytest.mark.skip
class TestGenSchemaWriter(object):
"""Tests for the Genfile writing method."""
pass
Write parameterized test for gen schema read function.
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
import json
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
return [
("tests/resources/genfile1_single_var_single_arg.gen", "tests/resources/configs1.json"),
("tests/resources/genfile2_single_var_multiple_args.gen", "tests/resources/configs2.json"),
("tests/resources/genfile3_multiple_vars.gen", "tests/resources/configs3.json"),
("tests/resources/genfile4_simple_nested_preconstructed_args.gen", "tests/resources/configs4.json"),
("tests/resources/genfile5_simple_nested.gen", "tests/resources/configs5.json"),
("tests/resources/genfile6_multiple_vars_in_nest.gen", "tests/resources/configs6.json"),
("tests/resources/genfile7_incomplete_nested.gen", "tests/resources/configs7.json"),
("tests/resources/genfile8_multiple_nests.gen", "tests/resources/configs8.json"),
("tests/resources/genfile9_multi_nested.gen", "tests/resources/configs9.json"),
("tests/resources/genfile10_multiple_vars_own_nest.gen", "tests/resources/configs10.json"),
("tests/resources/genfile11_commented", "tests/resources/configs11.json")
]
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_read(genfile, config_file):
"""Test the Genfile read method with all examples in test/resources."""
schema = GenSchema.read(genfile)
configs = json.load(open(config_file))
pytest.helpers.compare_configs(configs, schema)
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_write(genfile, config_file):
"""Test the Genfile write method with all examples in test/resources."""
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
pass
@pytest.mark.skip
class TestGenSchemaReader(object):
"""Tests for Genfile parsing method."""
pass
@pytest.mark.skip
class TestGenSchemaWriter(object):
"""Tests for the Genfile writing method."""
pass
<commit_msg>Write parameterized test for gen schema read function.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
import json
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
return [
("tests/resources/genfile1_single_var_single_arg.gen", "tests/resources/configs1.json"),
("tests/resources/genfile2_single_var_multiple_args.gen", "tests/resources/configs2.json"),
("tests/resources/genfile3_multiple_vars.gen", "tests/resources/configs3.json"),
("tests/resources/genfile4_simple_nested_preconstructed_args.gen", "tests/resources/configs4.json"),
("tests/resources/genfile5_simple_nested.gen", "tests/resources/configs5.json"),
("tests/resources/genfile6_multiple_vars_in_nest.gen", "tests/resources/configs6.json"),
("tests/resources/genfile7_incomplete_nested.gen", "tests/resources/configs7.json"),
("tests/resources/genfile8_multiple_nests.gen", "tests/resources/configs8.json"),
("tests/resources/genfile9_multi_nested.gen", "tests/resources/configs9.json"),
("tests/resources/genfile10_multiple_vars_own_nest.gen", "tests/resources/configs10.json"),
("tests/resources/genfile11_commented", "tests/resources/configs11.json")
]
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_read(genfile, config_file):
"""Test the Genfile read method with all examples in test/resources."""
schema = GenSchema.read(genfile)
configs = json.load(open(config_file))
pytest.helpers.compare_configs(configs, schema)
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_write(genfile, config_file):
"""Test the Genfile write method with all examples in test/resources."""
pass
|
# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
pass
@pytest.mark.skip
class TestGenSchemaReader(object):
"""Tests for Genfile parsing method."""
pass
@pytest.mark.skip
class TestGenSchemaWriter(object):
"""Tests for the Genfile writing method."""
pass
Write parameterized test for gen schema read function.# -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
import json
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
return [
("tests/resources/genfile1_single_var_single_arg.gen", "tests/resources/configs1.json"),
("tests/resources/genfile2_single_var_multiple_args.gen", "tests/resources/configs2.json"),
("tests/resources/genfile3_multiple_vars.gen", "tests/resources/configs3.json"),
("tests/resources/genfile4_simple_nested_preconstructed_args.gen", "tests/resources/configs4.json"),
("tests/resources/genfile5_simple_nested.gen", "tests/resources/configs5.json"),
("tests/resources/genfile6_multiple_vars_in_nest.gen", "tests/resources/configs6.json"),
("tests/resources/genfile7_incomplete_nested.gen", "tests/resources/configs7.json"),
("tests/resources/genfile8_multiple_nests.gen", "tests/resources/configs8.json"),
("tests/resources/genfile9_multi_nested.gen", "tests/resources/configs9.json"),
("tests/resources/genfile10_multiple_vars_own_nest.gen", "tests/resources/configs10.json"),
("tests/resources/genfile11_commented", "tests/resources/configs11.json")
]
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_read(genfile, config_file):
"""Test the Genfile read method with all examples in test/resources."""
schema = GenSchema.read(genfile)
configs = json.load(open(config_file))
pytest.helpers.compare_configs(configs, schema)
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_write(genfile, config_file):
"""Test the Genfile write method with all examples in test/resources."""
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
pass
@pytest.mark.skip
class TestGenSchemaReader(object):
"""Tests for Genfile parsing method."""
pass
@pytest.mark.skip
class TestGenSchemaWriter(object):
"""Tests for the Genfile writing method."""
pass
<commit_msg>Write parameterized test for gen schema read function.<commit_after># -*- coding: utf-8 -*-
"""
Test parsing genfiles and writing GenSchema to genfiles.
Created on Sun Jul 10 19:54:47 2016
@author: Aaron Beckett
"""
import pytest
import json
from ctip import GenSchema
def gather_test_files():
"""Search the tests/resources directory for pairs of gen and config files."""
return [
("tests/resources/genfile1_single_var_single_arg.gen", "tests/resources/configs1.json"),
("tests/resources/genfile2_single_var_multiple_args.gen", "tests/resources/configs2.json"),
("tests/resources/genfile3_multiple_vars.gen", "tests/resources/configs3.json"),
("tests/resources/genfile4_simple_nested_preconstructed_args.gen", "tests/resources/configs4.json"),
("tests/resources/genfile5_simple_nested.gen", "tests/resources/configs5.json"),
("tests/resources/genfile6_multiple_vars_in_nest.gen", "tests/resources/configs6.json"),
("tests/resources/genfile7_incomplete_nested.gen", "tests/resources/configs7.json"),
("tests/resources/genfile8_multiple_nests.gen", "tests/resources/configs8.json"),
("tests/resources/genfile9_multi_nested.gen", "tests/resources/configs9.json"),
("tests/resources/genfile10_multiple_vars_own_nest.gen", "tests/resources/configs10.json"),
("tests/resources/genfile11_commented", "tests/resources/configs11.json")
]
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_read(genfile, config_file):
"""Test the Genfile read method with all examples in test/resources."""
schema = GenSchema.read(genfile)
configs = json.load(open(config_file))
pytest.helpers.compare_configs(configs, schema)
@pytest.mark.parametrize("genfile,config_file", gather_test_files())
def test_gen_schema_write(genfile, config_file):
"""Test the Genfile write method with all examples in test/resources."""
pass
|
78d520b88e13a35ac20a0eeea1385f35b17383d2
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
yield i
not_prime.update(range(i*i, n, i))
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
|
Switch to more optimal non-generator solution
|
Switch to more optimal non-generator solution
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
yield i
not_prime.update(range(i*i, n, i))
Switch to more optimal non-generator solution
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
|
<commit_before>def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
yield i
not_prime.update(range(i*i, n, i))
<commit_msg>Switch to more optimal non-generator solution<commit_after>
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
yield i
not_prime.update(range(i*i, n, i))
Switch to more optimal non-generator solutiondef sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
|
<commit_before>def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
yield i
not_prime.update(range(i*i, n, i))
<commit_msg>Switch to more optimal non-generator solution<commit_after>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
|
9cb249fc2f7bc1043d50f7d9424026a3a68e4f2a
|
python/ql/test/query-tests/Security/CWE-295-RequestWithoutValidation/make_request.py
|
python/ql/test/query-tests/Security/CWE-295-RequestWithoutValidation/make_request.py
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
s = requests.Session()
s.get("url", verify=False) # BAD
|
Add test we don't handle for `py/request-without-cert-validation`
|
Python: Add test we don't handle for `py/request-without-cert-validation`
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
Python: Add test we don't handle for `py/request-without-cert-validation`
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
s = requests.Session()
s.get("url", verify=False) # BAD
|
<commit_before>import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
<commit_msg>Python: Add test we don't handle for `py/request-without-cert-validation`<commit_after>
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
s = requests.Session()
s.get("url", verify=False) # BAD
|
import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
Python: Add test we don't handle for `py/request-without-cert-validation`import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
s = requests.Session()
s.get("url", verify=False) # BAD
|
<commit_before>import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
<commit_msg>Python: Add test we don't handle for `py/request-without-cert-validation`<commit_after>import requests
#Simple cases
requests.get('https://semmle.com', verify=True) # GOOD
requests.get('https://semmle.com', verify=False) # BAD
requests.post('https://semmle.com', verify=True) # GOOD
requests.post('https://semmle.com', verify=False) # BAD
# Simple flow
put = requests.put
put('https://semmle.com', verify="/path/to/cert/") # GOOD
put('https://semmle.com', verify=False) # BAD
#Other flow
delete = requests.delete
def req1(verify=False):
delete('https://semmle.com', verify) # BAD
if verify:
delete('https://semmle.com', verify) # GOOD
if not verify:
return
delete('https://semmle.com', verify) # GOOD
patch = requests.patch
def req2(verify):
patch('https://semmle.com', verify=verify) # BAD (from line 30)
req2(False) # BAD (at line 28)
req2("/path/to/cert/") # GOOD
#Falsey value
requests.post('https://semmle.com', verify=0) # BAD
# requests treat `None` as default value, which means it is turned on
requests.get('https://semmle.com') # OK
requests.get('https://semmle.com', verify=None) # OK
s = requests.Session()
s.get("url", verify=False) # BAD
|
55f8bce3a4d1232f2b7ffbdfa2c1cf741686a33f
|
lots/migrations/0002_auto_20170717_2115.py
|
lots/migrations/0002_auto_20170717_2115.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
Receipt.objects.all().delete()
Fee.objects.all().delete()
Lot.objects.all().delete()
LotType.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations, connection
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM lots_lot_contacts')
cursor.execute('DELETE FROM lots_contact')
cursor.execute('DELETE FROM lots_lot')
cursor.execute('DELETE FROM lots_lottype')
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
Make reverse migration for lot_type run
|
Make reverse migration for lot_type run
|
Python
|
mpl-2.0
|
jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
Receipt.objects.all().delete()
Fee.objects.all().delete()
Lot.objects.all().delete()
LotType.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
Make reverse migration for lot_type run
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations, connection
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM lots_lot_contacts')
cursor.execute('DELETE FROM lots_contact')
cursor.execute('DELETE FROM lots_lot')
cursor.execute('DELETE FROM lots_lottype')
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
Receipt.objects.all().delete()
Fee.objects.all().delete()
Lot.objects.all().delete()
LotType.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
<commit_msg>Make reverse migration for lot_type run<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations, connection
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM lots_lot_contacts')
cursor.execute('DELETE FROM lots_contact')
cursor.execute('DELETE FROM lots_lot')
cursor.execute('DELETE FROM lots_lottype')
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
Receipt.objects.all().delete()
Fee.objects.all().delete()
Lot.objects.all().delete()
LotType.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
Make reverse migration for lot_type run# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations, connection
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM lots_lot_contacts')
cursor.execute('DELETE FROM lots_contact')
cursor.execute('DELETE FROM lots_lot')
cursor.execute('DELETE FROM lots_lottype')
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
Receipt.objects.all().delete()
Fee.objects.all().delete()
Lot.objects.all().delete()
LotType.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
<commit_msg>Make reverse migration for lot_type run<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 02:15
from __future__ import unicode_literals
from django.db import models, migrations, connection
from lots.models import LotType, Lot
from revenue.models import Fee, Receipt
def load_data(apps, schema_editor):
LotType = apps.get_model("lots", "LotType")
LotType(name="Casa").save()
LotType(name="Lote").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM lots_lot_contacts')
cursor.execute('DELETE FROM lots_contact')
cursor.execute('DELETE FROM lots_lot')
cursor.execute('DELETE FROM lots_lottype')
class Migration(migrations.Migration):
dependencies = [
('lots', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
|
9ffe8a195af0a2504728e4764d093152959474e8
|
mrp_product_variants/models/procurement.py
|
mrp_product_variants/models/procurement.py
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
result['product_attribute_ids'] = (
(0, 0, x) for x in product._get_product_attributes_values_dict())
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
product_attribute_ids = product._get_product_attributes_values_dict()
result['product_attribute_ids'] = map(
lambda x: (0, 0, x), product_attribute_ids)
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
Fix MTO configurator not filled
|
[FIX] mrp_product_variants: Fix MTO configurator not filled
|
Python
|
agpl-3.0
|
Eficent/odoomrp-wip,oihane/odoomrp-wip,odoomrp/odoomrp-wip,jobiols/odoomrp-wip,esthermm/odoomrp-wip,esthermm/odoomrp-wip,Eficent/odoomrp-wip,jobiols/odoomrp-wip,Daniel-CA/odoomrp-wip-public,diagramsoftware/odoomrp-wip,diagramsoftware/odoomrp-wip,sergiocorato/odoomrp-wip,sergiocorato/odoomrp-wip,factorlibre/odoomrp-wip,factorlibre/odoomrp-wip,Daniel-CA/odoomrp-wip-public,oihane/odoomrp-wip,agaldona/odoomrp-wip-1,odoomrp/odoomrp-wip,agaldona/odoomrp-wip-1
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
result['product_attribute_ids'] = (
(0, 0, x) for x in product._get_product_attributes_values_dict())
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
[FIX] mrp_product_variants: Fix MTO configurator not filled
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
product_attribute_ids = product._get_product_attributes_values_dict()
result['product_attribute_ids'] = map(
lambda x: (0, 0, x), product_attribute_ids)
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
result['product_attribute_ids'] = (
(0, 0, x) for x in product._get_product_attributes_values_dict())
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
<commit_msg>[FIX] mrp_product_variants: Fix MTO configurator not filled<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
product_attribute_ids = product._get_product_attributes_values_dict()
result['product_attribute_ids'] = map(
lambda x: (0, 0, x), product_attribute_ids)
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
result['product_attribute_ids'] = (
(0, 0, x) for x in product._get_product_attributes_values_dict())
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
[FIX] mrp_product_variants: Fix MTO configurator not filled# -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
product_attribute_ids = product._get_product_attributes_values_dict()
result['product_attribute_ids'] = map(
lambda x: (0, 0, x), product_attribute_ids)
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
result['product_attribute_ids'] = (
(0, 0, x) for x in product._get_product_attributes_values_dict())
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
<commit_msg>[FIX] mrp_product_variants: Fix MTO configurator not filled<commit_after># -*- coding: utf-8 -*-
# © 2015 Oihane Crucelaegui - AvanzOSC
# © 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.model
def _prepare_mo_vals(self, procurement):
result = super(ProcurementOrder, self)._prepare_mo_vals(procurement)
product_id = result.get('product_id')
product = self.env['product.product'].browse(product_id)
result['product_tmpl_id'] = product.product_tmpl_id.id
product_attribute_ids = product._get_product_attributes_values_dict()
result['product_attribute_ids'] = map(
lambda x: (0, 0, x), product_attribute_ids)
for val in result['product_attribute_ids']:
val = val[2]
val['product_tmpl_id'] = product.product_tmpl_id.id
val['owner_model'] = 'mrp.production'
return result
|
ce6c4cb4bcac22fecd0a4a00624c7bc7eca325d0
|
saltapi/cli.py
|
saltapi/cli.py
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
'''
CLI entry-point for salt-api
'''
# Import python libs
import sys
import logging
# Import salt libs
import salt.utils.verify
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
log = logging.getLogger(__name__)
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
try:
if self.config['verify_env']:
logfile = self.config['log_file']
if logfile is not None and not logfile.startswith('tcp://') \
and not logfile.startswith('udp://') \
and not logfile.startswith('file://'):
# Logfile is not using Syslog, verify
salt.utils.verify.verify_files(
[logfile], self.config['user']
)
except OSError as err:
log.error(err)
sys.exit(err.errno)
self.setup_logfile_logger()
client = saltapi.client.SaltAPIClient(self.config)
self.daemonize_if_required()
self.set_pidfile()
client.run()
|
Enforce verify file on the log file and actually setup the log file logger.
|
Enforce verify file on the log file and actually setup the log file logger.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
Enforce verify file on the log file and actually setup the log file logger.
|
'''
CLI entry-point for salt-api
'''
# Import python libs
import sys
import logging
# Import salt libs
import salt.utils.verify
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
log = logging.getLogger(__name__)
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
try:
if self.config['verify_env']:
logfile = self.config['log_file']
if logfile is not None and not logfile.startswith('tcp://') \
and not logfile.startswith('udp://') \
and not logfile.startswith('file://'):
# Logfile is not using Syslog, verify
salt.utils.verify.verify_files(
[logfile], self.config['user']
)
except OSError as err:
log.error(err)
sys.exit(err.errno)
self.setup_logfile_logger()
client = saltapi.client.SaltAPIClient(self.config)
self.daemonize_if_required()
self.set_pidfile()
client.run()
|
<commit_before>'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
<commit_msg>Enforce verify file on the log file and actually setup the log file logger.<commit_after>
|
'''
CLI entry-point for salt-api
'''
# Import python libs
import sys
import logging
# Import salt libs
import salt.utils.verify
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
log = logging.getLogger(__name__)
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
try:
if self.config['verify_env']:
logfile = self.config['log_file']
if logfile is not None and not logfile.startswith('tcp://') \
and not logfile.startswith('udp://') \
and not logfile.startswith('file://'):
# Logfile is not using Syslog, verify
salt.utils.verify.verify_files(
[logfile], self.config['user']
)
except OSError as err:
log.error(err)
sys.exit(err.errno)
self.setup_logfile_logger()
client = saltapi.client.SaltAPIClient(self.config)
self.daemonize_if_required()
self.set_pidfile()
client.run()
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
Enforce verify file on the log file and actually setup the log file logger.'''
CLI entry-point for salt-api
'''
# Import python libs
import sys
import logging
# Import salt libs
import salt.utils.verify
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
log = logging.getLogger(__name__)
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
try:
if self.config['verify_env']:
logfile = self.config['log_file']
if logfile is not None and not logfile.startswith('tcp://') \
and not logfile.startswith('udp://') \
and not logfile.startswith('file://'):
# Logfile is not using Syslog, verify
salt.utils.verify.verify_files(
[logfile], self.config['user']
)
except OSError as err:
log.error(err)
sys.exit(err.errno)
self.setup_logfile_logger()
client = saltapi.client.SaltAPIClient(self.config)
self.daemonize_if_required()
self.set_pidfile()
client.run()
|
<commit_before>'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
<commit_msg>Enforce verify file on the log file and actually setup the log file logger.<commit_after>'''
CLI entry-point for salt-api
'''
# Import python libs
import sys
import logging
# Import salt libs
import salt.utils.verify
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
log = logging.getLogger(__name__)
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
try:
if self.config['verify_env']:
logfile = self.config['log_file']
if logfile is not None and not logfile.startswith('tcp://') \
and not logfile.startswith('udp://') \
and not logfile.startswith('file://'):
# Logfile is not using Syslog, verify
salt.utils.verify.verify_files(
[logfile], self.config['user']
)
except OSError as err:
log.error(err)
sys.exit(err.errno)
self.setup_logfile_logger()
client = saltapi.client.SaltAPIClient(self.config)
self.daemonize_if_required()
self.set_pidfile()
client.run()
|
257afb0046c4af30bbfe0d46c36f0ec3257051b6
|
glooey/__init__.py
|
glooey/__init__.py
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
Make the themes module available by default.
|
Make the themes module available by default.
|
Python
|
mit
|
kxgames/glooey,kxgames/glooey
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
Make the themes module available by default.
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
<commit_before>#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
<commit_msg>Make the themes module available by default.<commit_after>
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
Make the themes module available by default.#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
<commit_before>#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
<commit_msg>Make the themes module available by default.<commit_after>#!/usr/bin/env python3
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
bea2e64d8ed8ab2a368d660a15ed2f8485fdc29a
|
set_offline.py
|
set_offline.py
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.channelupdater as ChannelUpdater
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
cf.load_configs(default_config, user_config)
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, None, embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
if not os.path.isfile(user_config):
print("Could not find config at: \n{}".format(user_config))
exit()
cf.load_configs(default_config, user_config)
import SLA_bot.channelupdater as ChannelUpdater
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
body = '''
[Hourly EQ](http://pso2emq.flyergo.eu/)
[Calendar (JST)](https://calendar.google.com/calendar/embed?src=pso2emgquest@gmail.com&mode=agenda)
[Official schedule](http://pso2.jp/players/boost/)'''
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
embed.add_field(name='--------------------', value=body)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, ' ', embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
Add EQ related links in offline bot message
|
Add EQ related links in offline bot message
|
Python
|
mit
|
EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.channelupdater as ChannelUpdater
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
cf.load_configs(default_config, user_config)
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, None, embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
Add EQ related links in offline bot message
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
if not os.path.isfile(user_config):
print("Could not find config at: \n{}".format(user_config))
exit()
cf.load_configs(default_config, user_config)
import SLA_bot.channelupdater as ChannelUpdater
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
body = '''
[Hourly EQ](http://pso2emq.flyergo.eu/)
[Calendar (JST)](https://calendar.google.com/calendar/embed?src=pso2emgquest@gmail.com&mode=agenda)
[Official schedule](http://pso2.jp/players/boost/)'''
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
embed.add_field(name='--------------------', value=body)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, ' ', embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
<commit_before>import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.channelupdater as ChannelUpdater
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
cf.load_configs(default_config, user_config)
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, None, embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
<commit_msg>Add EQ related links in offline bot message<commit_after>
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
if not os.path.isfile(user_config):
print("Could not find config at: \n{}".format(user_config))
exit()
cf.load_configs(default_config, user_config)
import SLA_bot.channelupdater as ChannelUpdater
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
body = '''
[Hourly EQ](http://pso2emq.flyergo.eu/)
[Calendar (JST)](https://calendar.google.com/calendar/embed?src=pso2emgquest@gmail.com&mode=agenda)
[Official schedule](http://pso2.jp/players/boost/)'''
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
embed.add_field(name='--------------------', value=body)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, ' ', embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.channelupdater as ChannelUpdater
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
cf.load_configs(default_config, user_config)
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, None, embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
Add EQ related links in offline bot messageimport asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
if not os.path.isfile(user_config):
print("Could not find config at: \n{}".format(user_config))
exit()
cf.load_configs(default_config, user_config)
import SLA_bot.channelupdater as ChannelUpdater
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
body = '''
[Hourly EQ](http://pso2emq.flyergo.eu/)
[Calendar (JST)](https://calendar.google.com/calendar/embed?src=pso2emgquest@gmail.com&mode=agenda)
[Official schedule](http://pso2.jp/players/boost/)'''
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
embed.add_field(name='--------------------', value=body)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, ' ', embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
<commit_before>import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.channelupdater as ChannelUpdater
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
cf.load_configs(default_config, user_config)
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, None, embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
<commit_msg>Add EQ related links in offline bot message<commit_after>import asyncio
import os
import discord
from discord.ext import commands
import SLA_bot.config as cf
curr_dir = os.path.dirname(__file__)
default_config = os.path.join(curr_dir, 'default_config.ini'),
user_config = os.path.join(curr_dir, 'config.ini')
if not os.path.isfile(user_config):
print("Could not find config at: \n{}".format(user_config))
exit()
cf.load_configs(default_config, user_config)
import SLA_bot.channelupdater as ChannelUpdater
prefix = cf.get('General', 'command_prefix')
bot = commands.Bot(command_prefix=prefix)
body = '''
[Hourly EQ](http://pso2emq.flyergo.eu/)
[Calendar (JST)](https://calendar.google.com/calendar/embed?src=pso2emgquest@gmail.com&mode=agenda)
[Official schedule](http://pso2.jp/players/boost/)'''
async def set_offline():
ChannelUpdater.bot = bot
await ChannelUpdater.load_channels()
embed=discord.Embed(title='Bot is offline.', color=0xdc4a4a)
embed.add_field(name='--------------------', value=body)
for channel, messages in ChannelUpdater.channel_messages.items():
await ChannelUpdater.write_content(channel, ' ', embed)
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
await set_offline()
await bot.logout()
bot.run(cf.get('General', 'bot_token'))
|
45116fc996b097176bcfa2dcd7fb8c9710f6d66e
|
tests/test_basics.py
|
tests/test_basics.py
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
pretty_print_xml(tree.getroot())
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
Remove debug printing from test case
|
Remove debug printing from test case
|
Python
|
apache-2.0
|
t4ngo/sphinxcontrib-traceables
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
pretty_print_xml(tree.getroot())
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
Remove debug printing from test case
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
<commit_before>
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
pretty_print_xml(tree.getroot())
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
<commit_msg>Remove debug printing from test case<commit_after>
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
pretty_print_xml(tree.getroot())
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
Remove debug printing from test case
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
<commit_before>
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
pretty_print_xml(tree.getroot())
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
<commit_msg>Remove debug printing from test case<commit_after>
import os
from xml.etree import ElementTree
from utils import with_app, pretty_print_xml
#=============================================================================
# Tests
@with_app(buildername="xml", srcdir="basics")
def test_basics(app, status, warning):
app.build()
tree = ElementTree.parse(app.outdir / "index.xml")
# Verify that 2 traceables are found.
assert len(tree.findall(".//target")) == 2
assert len(tree.findall(".//index")) == 2
assert len(tree.findall(".//admonition")) == 2
assert len(tree.findall(".//admonition")) == 2
# Verify that child-parent relationship are made.
assert len(tree.findall(".//field_list")) == 2
parent_fields, child_fields = tree.findall(".//field_list")
for field in parent_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "child":
break
else:
assert False, "Parent's child field not found!"
for field in child_fields:
field_name = field.findall("./field_name")[0]
if field_name.text == "parent":
break
else:
assert False, "Child's parent field not found!"
# Verify that a warning is emitted for unknown traceable tag.
assert (warning.getvalue().find(
"WARNING: Traceables: no traceable with tag"
" 'NONEXISTENT' found!") > 0)
|
00712888b761bce556b73e36c9c7270829d3a1d4
|
tests/test_entity.py
|
tests/test_entity.py
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
def test_bad_json():
with pytest.raises(TypeError):
BaseEntityJSONEncoder().encode(set())
|
Test the failure branch in BaseEntityJSONDecoder
|
Test the failure branch in BaseEntityJSONDecoder
|
Python
|
mit
|
spaceboats/busbus
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
Test the failure branch in BaseEntityJSONDecoder
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
def test_bad_json():
with pytest.raises(TypeError):
BaseEntityJSONEncoder().encode(set())
|
<commit_before>from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
<commit_msg>Test the failure branch in BaseEntityJSONDecoder<commit_after>
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
def test_bad_json():
with pytest.raises(TypeError):
BaseEntityJSONEncoder().encode(set())
|
from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
Test the failure branch in BaseEntityJSONDecoderfrom test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
def test_bad_json():
with pytest.raises(TypeError):
BaseEntityJSONEncoder().encode(set())
|
<commit_before>from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
<commit_msg>Test the failure branch in BaseEntityJSONDecoder<commit_after>from test_provider_gtfs import provider
from busbus.entity import BaseEntityJSONEncoder
import json
import pytest
@pytest.fixture(scope='module')
def agency(provider):
return next(provider.agencies)
def test_entity_repr(agency):
assert 'DTA' in repr(agency)
def test_entity_failed_getattr(agency):
with pytest.raises(AttributeError):
agency.the_weather_in_london
def test_entity_failed_getitem(agency):
with pytest.raises(KeyError):
agency['the_weather_in_london']
def test_entity_to_dict(agency):
assert dict(agency)['id'] == 'DTA'
def test_entity_to_json(provider):
json_str = BaseEntityJSONEncoder().encode(next(provider.arrivals))
json.loads(json_str)
def test_bad_json():
with pytest.raises(TypeError):
BaseEntityJSONEncoder().encode(set())
|
6c2d8d3b2a5e148085e65df66b9c66c543c2dcb0
|
spacy/about.py
|
spacy/about.py
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-nightly.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
Use shortcuts-nightly.json to resolve model shortcuts
|
Use shortcuts-nightly.json to resolve model shortcuts
|
Python
|
mit
|
aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
Use shortcuts-nightly.json to resolve model shortcuts
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-nightly.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
<commit_msg>Use shortcuts-nightly.json to resolve model shortcuts<commit_after>
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-nightly.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
Use shortcuts-nightly.json to resolve model shortcuts# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-nightly.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
<commit_msg>Use shortcuts-nightly.json to resolve model shortcuts<commit_after># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://alpha.spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-nightly.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
f0d87f1979ace66f530bb8f7f00cdc71ac8f549c
|
chainer/datasets/__init__.py
|
chainer/datasets/__init__.py
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
Add LabeledImageDataset to datasets module
|
Add LabeledImageDataset to datasets module
|
Python
|
mit
|
chainer/chainer,kiyukuta/chainer,wkentaro/chainer,tkerola/chainer,kikusu/chainer,ysekky/chainer,wkentaro/chainer,delta2323/chainer,chainer/chainer,okuta/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,cupy/cupy,ktnyt/chainer,jnishi/chainer,kikusu/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,aonotas/chainer,ktnyt/chainer,anaruse/chainer,hvy/chainer,kashif/chainer,cupy/cupy,okuta/chainer,ronekko/chainer,ktnyt/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,pfnet/chainer,okuta/chainer,jnishi/chainer,cupy/cupy,niboshi/chainer,jnishi/chainer,ktnyt/chainer,chainer/chainer,jnishi/chainer,rezoo/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,cupy/cupy,wkentaro/chainer
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
Add LabeledImageDataset to datasets module
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
<commit_before>from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
<commit_msg>Add LabeledImageDataset to datasets module<commit_after>
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
Add LabeledImageDataset to datasets modulefrom chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
<commit_before>from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
<commit_msg>Add LabeledImageDataset to datasets module<commit_after>from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
4051794670ec252cb972ed0c8cd1a5203e8a8de4
|
amplpy/amplpython/__init__.py
|
amplpy/amplpython/__init__.py
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
Fix 'ImportError: DLL load failed'
|
Fix 'ImportError: DLL load failed'
|
Python
|
bsd-3-clause
|
ampl/amplpy,ampl/amplpy,ampl/amplpy
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Fix 'ImportError: DLL load failed'
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
<commit_msg>Fix 'ImportError: DLL load failed'<commit_after>
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Fix 'ImportError: DLL load failed'# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
<commit_msg>Fix 'ImportError: DLL load failed'<commit_after># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
lib32 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib32')
lib64 = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
31bb7c86a65dffb44a2950659da9f9299bb4023f
|
tests/fixtures/water_supply_exec.py
|
tests/fixtures/water_supply_exec.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from . water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
Revert "Used relative import for water_supply fixture"
|
Revert "Used relative import for water_supply fixture"
This reverts commit 8615f9c9d8a254dc6a43229e0ec8fc68ebe12e08.
|
Python
|
mit
|
willu47/smif,tomalrussell/smif,willu47/smif,nismod/smif,willu47/smif,nismod/smif,willu47/smif,nismod/smif,tomalrussell/smif,tomalrussell/smif,tomalrussell/smif,nismod/smif
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from . water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
Revert "Used relative import for water_supply fixture"
This reverts commit 8615f9c9d8a254dc6a43229e0ec8fc68ebe12e08.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from . water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
<commit_msg>Revert "Used relative import for water_supply fixture"
This reverts commit 8615f9c9d8a254dc6a43229e0ec8fc68ebe12e08.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from . water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
Revert "Used relative import for water_supply fixture"
This reverts commit 8615f9c9d8a254dc6a43229e0ec8fc68ebe12e08.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from . water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
<commit_msg>Revert "Used relative import for water_supply fixture"
This reverts commit 8615f9c9d8a254dc6a43229e0ec8fc68ebe12e08.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Implements example simulation model which can be run from the command line
Arguments
=========
raininess : int
Sets the amount of rain
"""
from argparse import ArgumentParser
from water_supply import ExampleWaterSupplySimulation
def argparse():
parser = ArgumentParser()
parser.add_argument("--raininess",
type=int,
help="Sets the amount of rain")
return parser.parse_args()
def main():
args = argparse()
water_supply = ExampleWaterSupplySimulation(args.raininess)
results = water_supply.simulate()
for key, val in results.items():
print("{},{}".format(key, val))
if __name__ == '__main__':
main()
|
0d0e354627441daf33ea8c5702c3977de992cc7a
|
tests/unit/utils/test_yamldumper.py
|
tests/unit/utils/test_yamldumper.py
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.dump(data) == '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == '!!python/unicode \'foo\': !!python/unicode \'bar\'\n'
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.ext.six
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
if salt.ext.six.PY2:
exp_yaml = '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
else:
exp_yaml = '{foo: bar}\n'
assert salt.utils.yamldumper.dump(data) == exp_yaml
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == exp_yaml.replace('{', '').replace('}', '')
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
Fix yamldumper test for both py2/py3
|
Fix yamldumper test for both py2/py3
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.dump(data) == '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == '!!python/unicode \'foo\': !!python/unicode \'bar\'\n'
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
Fix yamldumper test for both py2/py3
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.ext.six
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
if salt.ext.six.PY2:
exp_yaml = '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
else:
exp_yaml = '{foo: bar}\n'
assert salt.utils.yamldumper.dump(data) == exp_yaml
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == exp_yaml.replace('{', '').replace('}', '')
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
<commit_before># -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.dump(data) == '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == '!!python/unicode \'foo\': !!python/unicode \'bar\'\n'
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
<commit_msg>Fix yamldumper test for both py2/py3<commit_after>
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.ext.six
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
if salt.ext.six.PY2:
exp_yaml = '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
else:
exp_yaml = '{foo: bar}\n'
assert salt.utils.yamldumper.dump(data) == exp_yaml
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == exp_yaml.replace('{', '').replace('}', '')
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.dump(data) == '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == '!!python/unicode \'foo\': !!python/unicode \'bar\'\n'
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
Fix yamldumper test for both py2/py3# -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.ext.six
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
if salt.ext.six.PY2:
exp_yaml = '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
else:
exp_yaml = '{foo: bar}\n'
assert salt.utils.yamldumper.dump(data) == exp_yaml
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == exp_yaml.replace('{', '').replace('}', '')
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
<commit_before># -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.dump(data) == '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == '!!python/unicode \'foo\': !!python/unicode \'bar\'\n'
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
<commit_msg>Fix yamldumper test for both py2/py3<commit_after># -*- coding: utf-8 -*-
'''
Unit tests for salt.utils.yamldumper
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.ext.six
import salt.utils.yamldumper
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class YamlDumperTestCase(TestCase):
'''
TestCase for salt.utils.yamldumper module
'''
def test_yaml_dump(self):
'''
Test yaml.dump a dict
'''
data = {'foo': 'bar'}
if salt.ext.six.PY2:
exp_yaml = '{!!python/unicode \'foo\': !!python/unicode \'bar\'}\n'
else:
exp_yaml = '{foo: bar}\n'
assert salt.utils.yamldumper.dump(data) == exp_yaml
assert salt.utils.yamldumper.dump(data, default_flow_style=False) == exp_yaml.replace('{', '').replace('}', '')
def test_yaml_safe_dump(self):
'''
Test yaml.safe_dump a dict
'''
data = {'foo': 'bar'}
assert salt.utils.yamldumper.safe_dump(data) == '{foo: bar}\n'
assert salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == 'foo: bar\n'
|
8b0bccf5dbe86accd967bfc8cb0ee6db049ea23c
|
service/posts/serializers.py
|
service/posts/serializers.py
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to_author",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
Update visible_to reference to visible_to_author
|
Update visible_to reference to visible_to_author
|
Python
|
apache-2.0
|
TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
Update visible_to reference to visible_to_author
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to_author",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
<commit_before>from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
<commit_msg>Update visible_to reference to visible_to_author<commit_after>
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to_author",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
Update visible_to reference to visible_to_authorfrom rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to_author",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
<commit_before>from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
<commit_msg>Update visible_to reference to visible_to_author<commit_after>from rest_framework import serializers
from service.authors.serializers import SimpleAuthorSerializer
from service.comments.serializers import CommentSerializer
from social.app.models.post import Post
class PostSerializer(serializers.HyperlinkedModelSerializer):
# Not required by the spec, but makes testing a little easier
url = serializers.HyperlinkedIdentityField(
view_name="service:post-detail",
source='id'
)
author = SimpleAuthorSerializer()
comments = CommentSerializer(many=True)
contentType = serializers.CharField(source="content_type", read_only=True)
visibleTo = serializers.HyperlinkedRelatedField(
many=True,
read_only=True,
source="visible_to_author",
view_name="service:author-detail",
lookup_field="pk"
)
categories = serializers.ListField(
source="categories_list",
read_only=True
)
class Meta:
model = Post
fields = ("title", "source", "origin", "description", "contentType", "content", "author",
"categories", "comments", "published", "id", "url", "visibility", "visibleTo",
"unlisted")
|
8555f6c4076a485d7615b8caef861536096c0ac1
|
scripts/app.py
|
scripts/app.py
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.setTransformer(1)
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
Load source dataset and save transformed dataset
|
Load source dataset and save transformed dataset
|
Python
|
mit
|
rsk-mind/rsk-mind-framework
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.setTransformer(1)
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
Load source dataset and save transformed dataset
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
<commit_before>from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.setTransformer(1)
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
<commit_msg>Load source dataset and save transformed dataset<commit_after>
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.setTransformer(1)
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
Load source dataset and save transformed datasetfrom rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
<commit_before>from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.setTransformer(1)
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
<commit_msg>Load source dataset and save transformed dataset<commit_after>from rsk_mind.datasource import CSVDatasource
datasource = CSVDatasource('in.csv')
dataset = datasource.read()
dataset.applyTransformations()
datasource = CSVDatasource('out.csv')
datasource.write(dataset)
|
586fab3cdc9e059c082bf209a6113b6bb06f2119
|
knox/settings.py
|
knox/settings.py
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import api_settings, APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'LOGIN_AUTHENTICATION_CLASSES': api_settings.DEFAULT_AUTHENTICATION_CLASSES,
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
Revert "separate default authentication from the DRF's one"
|
Revert "separate default authentication from the DRF's one"
This reverts commit 73aef41ffd2be2fbed11cf75f75393a80322bdcb.
|
Python
|
mit
|
James1345/django-rest-knox,James1345/django-rest-knox
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import api_settings, APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'LOGIN_AUTHENTICATION_CLASSES': api_settings.DEFAULT_AUTHENTICATION_CLASSES,
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
Revert "separate default authentication from the DRF's one"
This reverts commit 73aef41ffd2be2fbed11cf75f75393a80322bdcb.
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
<commit_before>from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import api_settings, APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'LOGIN_AUTHENTICATION_CLASSES': api_settings.DEFAULT_AUTHENTICATION_CLASSES,
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
<commit_msg>Revert "separate default authentication from the DRF's one"
This reverts commit 73aef41ffd2be2fbed11cf75f75393a80322bdcb.<commit_after>
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import api_settings, APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'LOGIN_AUTHENTICATION_CLASSES': api_settings.DEFAULT_AUTHENTICATION_CLASSES,
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
Revert "separate default authentication from the DRF's one"
This reverts commit 73aef41ffd2be2fbed11cf75f75393a80322bdcb.from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
<commit_before>from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import api_settings, APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'LOGIN_AUTHENTICATION_CLASSES': api_settings.DEFAULT_AUTHENTICATION_CLASSES,
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
<commit_msg>Revert "separate default authentication from the DRF's one"
This reverts commit 73aef41ffd2be2fbed11cf75f75393a80322bdcb.<commit_after>from datetime import timedelta
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'REST_KNOX', None)
DEFAULTS = {
'SECURE_HASH_ALGORITHM': 'cryptography.hazmat.primitives.hashes.SHA512',
'AUTH_TOKEN_CHARACTER_LENGTH': 64,
'TOKEN_TTL': timedelta(hours=10),
'USER_SERIALIZER': 'knox.serializers.UserSerializer',
}
IMPORT_STRINGS = {
'SECURE_HASH_ALGORITHM',
'USER_SERIALIZER',
}
knox_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
global knox_settings
setting, value = kwargs['setting'], kwargs['value']
if setting == 'REST_KNOX':
knox_settings = APISettings(value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
class CONSTANTS:
'''
Constants cannot be changed at runtime
'''
TOKEN_KEY_LENGTH = 8
DIGEST_LENGTH = 128
SALT_LENGTH = 16
def __setattr__(self, *args, **kwargs):
raise RuntimeException('''
Constant values must NEVER be changed at runtime, as they are
integral to the structure of database tables
''')
CONSTANTS = CONSTANTS()
|
151f05738d760909d5c3eba6b6d7c182aa77e8d4
|
opps/core/admin.py
|
opps/core/admin.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name',
'child_class']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
Add child_class on list filter PublishableAdmin
|
Add child_class on list filter PublishableAdmin
|
Python
|
mit
|
williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
Add child_class on list filter PublishableAdmin
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name',
'child_class']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
<commit_msg>Add child_class on list filter PublishableAdmin<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name',
'child_class']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
Add child_class on list filter PublishableAdmin#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name',
'child_class']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
<commit_msg>Add child_class on list filter PublishableAdmin<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import Site
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel_name', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel_name',
'child_class']
search_fields = ['title', 'slug', 'headline', 'channel_name']
exclude = ('user',)
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.date_insert = timezone.now()
obj.site = Site.objects.get(pk=settings.SITE_ID)
obj.date_update = timezone.now()
obj.save()
|
b3ff448c44af0a7d342364fb482d629e80b6ee40
|
sipa/model/pycroft/schema.py
|
sipa/model/pycroft/schema.py
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: int
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from decimal import Decimal
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: Decimal
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
Fix pycroft backend displaying wrong finance balance
|
Fix pycroft backend displaying wrong finance balance
|
Python
|
mit
|
agdsn/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa,MarauderXtreme/sipa
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: int
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
Fix pycroft backend displaying wrong finance balance
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from decimal import Decimal
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: Decimal
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import annotations
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: int
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
<commit_msg>Fix pycroft backend displaying wrong finance balance<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from decimal import Decimal
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: Decimal
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: int
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
Fix pycroft backend displaying wrong finance balance# -*- coding: utf-8 -*-
from __future__ import annotations
from decimal import Decimal
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: Decimal
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import annotations
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: int
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
<commit_msg>Fix pycroft backend displaying wrong finance balance<commit_after># -*- coding: utf-8 -*-
from __future__ import annotations
from decimal import Decimal
from typing import List, Optional
from sipa.model.pycroft.unserialize import unserializer
@unserializer
class UserData:
id: int
user_id: str
login: str
name: str
status: UserStatus
room: str
mail: str
cache: bool
traffic_history: List[TrafficHistoryEntry]
interfaces: List[Interface]
finance_balance: Decimal
finance_history: List[FinanceHistoryEntry]
# TODO implement `cls.Meta.custom_constructors`, use `parse_date` for this
last_finance_update: str
# TODO introduce properties once they can be excluded
@unserializer
class UserStatus:
member: bool
traffic_exceeded: bool
network_access: bool
account_balanced: bool
violation: bool
@unserializer
class Interface:
id: int
mac: str
ips: List[str]
@unserializer
class TrafficHistoryEntry:
timestamp: str
ingress: Optional[int]
egress: Optional[int]
@unserializer
class FinanceHistoryEntry:
valid_on: str
amount: int
description: str
|
48ffcca081ab1d143e9941e67e6cc5c6a2844d23
|
pygotham/admin/talks.py
|
pygotham/admin/talks.py
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
Add filters to the talk admin
|
Add filters to the talk admin
@logston is reviewing talks and wanted to see the talk duration. He also
thought it would be useful to be able to filter by the duration and
status of the talk.
|
Python
|
bsd-3-clause
|
djds23/pygotham-1,pathunstrom/pygotham,djds23/pygotham-1,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,djds23/pygotham-1,pathunstrom/pygotham,pathunstrom/pygotham,PyGotham/pygotham,PyGotham/pygotham,pathunstrom/pygotham,pathunstrom/pygotham
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
Add filters to the talk admin
@logston is reviewing talks and wanted to see the talk duration. He also
thought it would be useful to be able to filter by the duration and
status of the talk.
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
<commit_before>"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
<commit_msg>Add filters to the talk admin
@logston is reviewing talks and wanted to see the talk duration. He also
thought it would be useful to be able to filter by the duration and
status of the talk.<commit_after>
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
Add filters to the talk admin
@logston is reviewing talks and wanted to see the talk duration. He also
thought it would be useful to be able to filter by the duration and
status of the talk."""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
<commit_before>"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
<commit_msg>Add filters to the talk admin
@logston is reviewing talks and wanted to see the talk duration. He also
thought it would be useful to be able to filter by the duration and
status of the talk.<commit_after>"""Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
|
ad3173b5f701cc27532103fcffe52deca67432b7
|
user_profile/models.py
|
user_profile/models.py
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(blank=True, upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
Change user_profile so picture can be null
|
Change user_profile so picture can be null
|
Python
|
mit
|
DeWaster/Tviserrys,DeWaster/Tviserrys
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
Change user_profile so picture can be null
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(blank=True, upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
<commit_msg>Change user_profile so picture can be null<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(blank=True, upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
Change user_profile so picture can be nullfrom django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(blank=True, upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
<commit_msg>Change user_profile so picture can be null<commit_after>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(blank=True, upload_to='media/profiles/')
thumbnail = models.ImageField(
upload_to='media/profiles/thumb/',
max_length=500,
null=True,
blank=True
)
follows = models.ManyToManyField("self", blank=True)
|
175ffb66a58d8f05150a50b2a6dce30663f5999c
|
user_profile/models.py
|
user_profile/models.py
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
follows = models.ManyToManyField("self", blank=True, null=True)
|
Add following into user profile
|
Add following into user profile
|
Python
|
mit
|
DeWaster/Tviserrys,DeWaster/Tviserrys
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')Add following into user profile
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
follows = models.ManyToManyField("self", blank=True, null=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')<commit_msg>Add following into user profile<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
follows = models.ManyToManyField("self", blank=True, null=True)
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')Add following into user profilefrom django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
follows = models.ManyToManyField("self", blank=True, null=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')<commit_msg>Add following into user profile<commit_after>from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(max_length=3000)
picture = models.ImageField(upload_to='media/profiles/')
follows = models.ManyToManyField("self", blank=True, null=True)
|
8a7be30e2847f6d50f401dedc616d667cb36a6c6
|
rx/linq/observable/average.py
|
rx/linq/observable/average.py
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def selector(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().select(selector)
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def mapper(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().map(mapper)
|
Rename from select to map
|
Rename from select to map
|
Python
|
mit
|
dbrattli/RxPY,ReactiveX/RxPY,ReactiveX/RxPY
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def selector(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().select(selector)
Rename from select to map
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def mapper(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().map(mapper)
|
<commit_before>from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def selector(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().select(selector)
<commit_msg>Rename from select to map<commit_after>
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def mapper(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().map(mapper)
|
from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def selector(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().select(selector)
Rename from select to mapfrom six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def mapper(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().map(mapper)
|
<commit_before>from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def selector(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().select(selector)
<commit_msg>Rename from select to map<commit_after>from six import add_metaclass
from rx import Observable
from rx.internal import ExtensionMethod
class AverageValue(object):
def __init__(self, sum, count):
self.sum = sum
self.count = count
@add_metaclass(ExtensionMethod)
class ObservableAverage(Observable):
"""Uses a meta class to extend Observable with the methods in this class"""
def average(self, key_selector=None):
"""Computes the average of an observable sequence of values that are in
the sequence or obtained by invoking a transform function on each
element of the input sequence if present.
Example
res = source.average();
res = source.average(lambda x: x.value)
key_selector -- A transform function to apply to each element.
Returns an observable sequence containing a single element with the
average of the sequence of values."""
if key_selector:
return self.select(key_selector).average()
def accumulator(prev, cur):
return AverageValue(sum=prev.sum+cur, count=prev.count+1)
def mapper(s):
if s.count == 0:
raise Exception('The input sequence was empty')
return s.sum / float(s.count)
seed = AverageValue(sum=0, count=0)
return self.scan(accumulator, seed).last().map(mapper)
|
d7f80b24f37ffb5e5cd1f7b2ccfa83c144a79c4d
|
ironic/tests/__init__.py
|
ironic/tests/__init__.py
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# NOTE(viktors): We can't use mock as third-party library in python 3.4 because
# of bug https://code.google.com/p/mock/issues/detail?id=234
# so let's use mock from standard library in python 3.x
import six
if six.PY3:
import sys
import unittest.mock
sys.modules['mock'] = unittest.mock
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
|
Remove broken workaround code for old mock.
|
Remove broken workaround code for old mock.
Mock <= 1.0.1 was indeed broken on 3.4, but unittest.mock from 3.4 is
just as (or more) broken. Mock is now fixed, so don't play games with
the global state of the import system.
Change-Id: I5e04b773d33c63d5cf06ff60c321de70de453b69
Closes-Bug: #1488252
Partial-Bug: #1463867
|
Python
|
apache-2.0
|
openstack/ironic,bacaldwell/ironic,pshchelo/ironic,ionutbalutoiu/ironic,NaohiroTamura/ironic,devananda/ironic,bacaldwell/ironic,pshchelo/ironic,naterh/ironic,ionutbalutoiu/ironic,redhat-openstack/ironic,SauloAislan/ironic,openstack/ironic,hpproliant/ironic,dims/ironic,dims/ironic,SauloAislan/ironic,NaohiroTamura/ironic
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# NOTE(viktors): We can't use mock as third-party library in python 3.4 because
# of bug https://code.google.com/p/mock/issues/detail?id=234
# so let's use mock from standard library in python 3.x
import six
if six.PY3:
import sys
import unittest.mock
sys.modules['mock'] = unittest.mock
Remove broken workaround code for old mock.
Mock <= 1.0.1 was indeed broken on 3.4, but unittest.mock from 3.4 is
just as (or more) broken. Mock is now fixed, so don't play games with
the global state of the import system.
Change-Id: I5e04b773d33c63d5cf06ff60c321de70de453b69
Closes-Bug: #1488252
Partial-Bug: #1463867
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
|
<commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# NOTE(viktors): We can't use mock as third-party library in python 3.4 because
# of bug https://code.google.com/p/mock/issues/detail?id=234
# so let's use mock from standard library in python 3.x
import six
if six.PY3:
import sys
import unittest.mock
sys.modules['mock'] = unittest.mock
<commit_msg>Remove broken workaround code for old mock.
Mock <= 1.0.1 was indeed broken on 3.4, but unittest.mock from 3.4 is
just as (or more) broken. Mock is now fixed, so don't play games with
the global state of the import system.
Change-Id: I5e04b773d33c63d5cf06ff60c321de70de453b69
Closes-Bug: #1488252
Partial-Bug: #1463867<commit_after>
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# NOTE(viktors): We can't use mock as third-party library in python 3.4 because
# of bug https://code.google.com/p/mock/issues/detail?id=234
# so let's use mock from standard library in python 3.x
import six
if six.PY3:
import sys
import unittest.mock
sys.modules['mock'] = unittest.mock
Remove broken workaround code for old mock.
Mock <= 1.0.1 was indeed broken on 3.4, but unittest.mock from 3.4 is
just as (or more) broken. Mock is now fixed, so don't play games with
the global state of the import system.
Change-Id: I5e04b773d33c63d5cf06ff60c321de70de453b69
Closes-Bug: #1488252
Partial-Bug: #1463867# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
|
<commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# NOTE(viktors): We can't use mock as third-party library in python 3.4 because
# of bug https://code.google.com/p/mock/issues/detail?id=234
# so let's use mock from standard library in python 3.x
import six
if six.PY3:
import sys
import unittest.mock
sys.modules['mock'] = unittest.mock
<commit_msg>Remove broken workaround code for old mock.
Mock <= 1.0.1 was indeed broken on 3.4, but unittest.mock from 3.4 is
just as (or more) broken. Mock is now fixed, so don't play games with
the global state of the import system.
Change-Id: I5e04b773d33c63d5cf06ff60c321de70de453b69
Closes-Bug: #1488252
Partial-Bug: #1463867<commit_after># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`Ironic.tests` -- ironic Unittests
=====================================================
.. automodule:: ironic.tests
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
|
059e5afd9b0bf81e70be177e60b37d21a557be4f
|
kivy/tests/test_fonts.py
|
kivy/tests/test_fonts.py
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), 'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), u'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).
|
Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).
|
Python
|
mit
|
tony/kivy,jegger/kivy,xpndlabs/kivy,rnixx/kivy,CuriousLearner/kivy,manthansharma/kivy,janssen/kivy,bliz937/kivy,jehutting/kivy,bionoid/kivy,xpndlabs/kivy,cbenhagen/kivy,JohnHowland/kivy,LogicalDash/kivy,jkankiewicz/kivy,yoelk/kivy,jegger/kivy,dirkjot/kivy,arcticshores/kivy,Farkal/kivy,angryrancor/kivy,jffernandez/kivy,Shyam10/kivy,darkopevec/kivy,youprofit/kivy,janssen/kivy,gonzafirewall/kivy,ernstp/kivy,youprofit/kivy,CuriousLearner/kivy,vipulroxx/kivy,Farkal/kivy,aron-bordin/kivy,aron-bordin/kivy,denys-duchier/kivy,denys-duchier/kivy,arcticshores/kivy,kived/kivy,arlowhite/kivy,vitorio/kivy,tony/kivy,adamkh/kivy,kivy/kivy,manashmndl/kivy,vitorio/kivy,jehutting/kivy,angryrancor/kivy,andnovar/kivy,youprofit/kivy,autosportlabs/kivy,niavlys/kivy,el-ethan/kivy,inclement/kivy,iamutkarshtiwari/kivy,xpndlabs/kivy,niavlys/kivy,bliz937/kivy,manashmndl/kivy,jkankiewicz/kivy,gonzafirewall/kivy,el-ethan/kivy,darkopevec/kivy,jkankiewicz/kivy,andnovar/kivy,gonzafirewall/kivy,janssen/kivy,thezawad/kivy,mSenyor/kivy,matham/kivy,edubrunaldi/kivy,yoelk/kivy,xiaoyanit/kivy,manthansharma/kivy,kivy/kivy,dirkjot/kivy,mSenyor/kivy,akshayaurora/kivy,rnixx/kivy,Ramalus/kivy,bob-the-hamster/kivy,angryrancor/kivy,Cheaterman/kivy,jegger/kivy,aron-bordin/kivy,arlowhite/kivy,xiaoyanit/kivy,ernstp/kivy,autosportlabs/kivy,Cheaterman/kivy,iamutkarshtiwari/kivy,bob-the-hamster/kivy,darkopevec/kivy,kived/kivy,Shyam10/kivy,KeyWeeUsr/kivy,bionoid/kivy,JohnHowland/kivy,inclement/kivy,cbenhagen/kivy,bhargav2408/kivy,Shyam10/kivy,niavlys/kivy,Farkal/kivy,akshayaurora/kivy,xiaoyanit/kivy,yoelk/kivy,vipulroxx/kivy,Shyam10/kivy,akshayaurora/kivy,andnovar/kivy,iamutkarshtiwari/kivy,CuriousLearner/kivy,arcticshores/kivy,jffernandez/kivy,denys-duchier/kivy,jehutting/kivy,viralpandey/kivy,dirkjot/kivy,el-ethan/kivy,rafalo1333/kivy,vipulroxx/kivy,bob-the-hamster/kivy,manthansharma/kivy,niavlys/kivy,janssen/kivy,KeyWeeUsr/kivy,bionoid/kivy,dirkjot/kivy,Cheaterman/kivy,vitorio/kivy,LogicalDash/kivy,VinGarcia/kivy,bliz937/kivy,rnixx/kivy,JohnHowland/kivy,VinGarcia/kivy,vipulroxx/kivy,arcticshores/kivy,Ramalus/kivy,KeyWeeUsr/kivy,edubrunaldi/kivy,ernstp/kivy,matham/kivy,VinGarcia/kivy,jffernandez/kivy,manthansharma/kivy,tony/kivy,adamkh/kivy,jegger/kivy,MiyamotoAkira/kivy,jkankiewicz/kivy,adamkh/kivy,habibmasuro/kivy,rafalo1333/kivy,darkopevec/kivy,jffernandez/kivy,Farkal/kivy,viralpandey/kivy,ernstp/kivy,adamkh/kivy,LogicalDash/kivy,viralpandey/kivy,MiyamotoAkira/kivy,Cheaterman/kivy,manashmndl/kivy,bhargav2408/kivy,bionoid/kivy,habibmasuro/kivy,matham/kivy,LogicalDash/kivy,KeyWeeUsr/kivy,gonzafirewall/kivy,arlowhite/kivy,denys-duchier/kivy,MiyamotoAkira/kivy,edubrunaldi/kivy,habibmasuro/kivy,aron-bordin/kivy,mSenyor/kivy,rafalo1333/kivy,autosportlabs/kivy,inclement/kivy,angryrancor/kivy,yoelk/kivy,thezawad/kivy,bob-the-hamster/kivy,JohnHowland/kivy,MiyamotoAkira/kivy,cbenhagen/kivy,thezawad/kivy,Ramalus/kivy,matham/kivy,bhargav2408/kivy,kived/kivy,kivy/kivy
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), 'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), u'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
<commit_before>#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), 'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
<commit_msg>Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).<commit_after>
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), u'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), 'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), u'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
<commit_before>#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), 'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
<commit_msg>Fix font test to use unicode font filename for proper loading to allow passing test on windows (and other os's).<commit_after>#-*- coding: utf-8 -*-
import unittest
class FontTestCase(unittest.TestCase):
def setUp(self):
import os
self.font_name = os.path.join(os.path.dirname(__file__), u'कीवी.ttf')
if not os.path.exists(self.font_name):
from zipfile import ZipFile
with ZipFile(os.path.join(os.path.dirname(__file__),
'unicode_font.zip'), 'r') as myzip:
myzip.extractall(path=os.path.dirname(__file__))
print(self.font_name)
def test_unicode_name(self):
from kivy.core.text import Label
lbl = Label(font_name=self.font_name)
lbl.refresh()
self.assertNotEqual(lbl.get_extents(''), None)
|
a7c3b4266fe4688ffdf91b9c85a93bea3660957e
|
statzlogger.py
|
statzlogger.py
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class Collection(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(logging.Handler):
pass
class Top(logging.Handler):
pass
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
|
Create and use a new base class.
|
Create and use a new base class.
Since all handlers need to implement emit(), this is probably the best route
forward.
|
Python
|
isc
|
whilp/statzlogger
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class Collection(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(logging.Handler):
pass
class Top(logging.Handler):
pass
Create and use a new base class.
Since all handlers need to implement emit(), this is probably the best route
forward.
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
|
<commit_before>import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class Collection(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(logging.Handler):
pass
class Top(logging.Handler):
pass
<commit_msg>Create and use a new base class.
Since all handlers need to implement emit(), this is probably the best route
forward.<commit_after>
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
|
import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class Collection(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(logging.Handler):
pass
class Top(logging.Handler):
pass
Create and use a new base class.
Since all handlers need to implement emit(), this is probably the best route
forward.import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
|
<commit_before>import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class Collection(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(logging.Handler):
pass
class Top(logging.Handler):
pass
<commit_msg>Create and use a new base class.
Since all handlers need to implement emit(), this is probably the best route
forward.<commit_after>import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indexes = {}
def emit(self, record):
log.debug("Got record: %s", record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
|
6de96ef01d24d01e704b35864d5a687f60063f7e
|
kirppu/app/urls.py
|
kirppu/app/urls.py
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)', 'checkout_view'),
)
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)$', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)$', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)$', 'checkout_view'),
)
|
Add line-ends to terminal url regexps.
|
Add line-ends to terminal url regexps.
- This ensures that parameters are not ignored if they do not match the
regexp.
Conflicts:
kirppu/app/urls.py
|
Python
|
mit
|
jlaunonen/kirppu,mniemela/kirppu,mniemela/kirppu,jlaunonen/kirppu,jlaunonen/kirppu,mniemela/kirppu,jlaunonen/kirppu
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)', 'checkout_view'),
)
Add line-ends to terminal url regexps.
- This ensures that parameters are not ignored if they do not match the
regexp.
Conflicts:
kirppu/app/urls.py
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)$', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)$', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)$', 'checkout_view'),
)
|
<commit_before>from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)', 'checkout_view'),
)
<commit_msg>Add line-ends to terminal url regexps.
- This ensures that parameters are not ignored if they do not match the
regexp.
Conflicts:
kirppu/app/urls.py<commit_after>
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)$', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)$', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)$', 'checkout_view'),
)
|
from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)', 'checkout_view'),
)
Add line-ends to terminal url regexps.
- This ensures that parameters are not ignored if they do not match the
regexp.
Conflicts:
kirppu/app/urls.pyfrom django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)$', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)$', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)$', 'checkout_view'),
)
|
<commit_before>from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)', 'checkout_view'),
)
<commit_msg>Add line-ends to terminal url regexps.
- This ensures that parameters are not ignored if they do not match the
regexp.
Conflicts:
kirppu/app/urls.py<commit_after>from django.conf.urls import patterns, url
__author__ = 'jyrkila'
urlpatterns = patterns('kirppu.app.views',
url(r'^page/(?P<sid>\d+)/(?P<eid>\d+)$', 'get_items', name='page'),
url(r'^code/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_item_image', name='image'),
url(r'^commands/(?P<eid>\d+)$', 'get_commands', name='commands'),
url(r'^command/(?P<iid>\w+?)\.(?P<ext>\w+)$', 'get_command_image', name='command_image'),
url(r'^checkout/(?P<eid>\d+)$', 'checkout_view'),
)
|
d9d051b7a80025d76cfe0827f0bf632cfbd18972
|
app/handlers.py
|
app/handlers.py
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
Remove extra check of symlinks.
|
Remove extra check of symlinks.
|
Python
|
apache-2.0
|
pcinkh/fake-useragent-cache-server
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
Remove extra check of symlinks.
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
<commit_before>import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
<commit_msg>Remove extra check of symlinks.<commit_after>
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
Remove extra check of symlinks.import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
<commit_before>import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
<commit_msg>Remove extra check of symlinks.<commit_after>import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
64bf087f818e58bec8c39c03fb51b62f4253b2ad
|
settings.py
|
settings.py
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = '/home/pieter/projects/factors/data'
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
Make DATADIR absolute path agnostic
|
Make DATADIR absolute path agnostic
|
Python
|
mit
|
Oxylo/factors
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = '/home/pieter/projects/factors/data'
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
Make DATADIR absolute path agnostic
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
<commit_before>import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = '/home/pieter/projects/factors/data'
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
<commit_msg>Make DATADIR absolute path agnostic<commit_after>
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = '/home/pieter/projects/factors/data'
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
Make DATADIR absolute path agnosticimport os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
<commit_before>import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = '/home/pieter/projects/factors/data'
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
<commit_msg>Make DATADIR absolute path agnostic<commit_after>import os
LOWAGE = 15
UPAGE = 70
MAXAGE = 120
DATADIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
INFILE = 'lifedb.xls'
XLSWB = os.path.join(DATADIR, INFILE)
INSURANCE_IDS = ['OPLL', 'NPLL-B', 'NPLL-O',
'NPLLRS', 'NPTL-B', 'NPTL-O', 'ay_avg']
|
d39d922168a0918bce572049cd93844060a79b9a
|
awseed/test_iam_credentials_envar.py
|
awseed/test_iam_credentials_envar.py
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=AKIAJOVZ2DVGJKZAOJSQ \
# AWS_SECRET_ACCESS_KEY=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 \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=acb \
# AWS_SECRET_ACCESS_KEY=xyz \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
Remove AWS credential (IAM user have already been deleted from AWS)
|
Remove AWS credential (IAM user have already been deleted from AWS)
|
Python
|
mit
|
nyue/awseed,nyue/awseed
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=AKIAJOVZ2DVGJKZAOJSQ \
# AWS_SECRET_ACCESS_KEY=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 \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
Remove AWS credential (IAM user have already been deleted from AWS)
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=acb \
# AWS_SECRET_ACCESS_KEY=xyz \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
<commit_before># AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=AKIAJOVZ2DVGJKZAOJSQ \
# AWS_SECRET_ACCESS_KEY=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 \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
<commit_msg>Remove AWS credential (IAM user have already been deleted from AWS)<commit_after>
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=acb \
# AWS_SECRET_ACCESS_KEY=xyz \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=AKIAJOVZ2DVGJKZAOJSQ \
# AWS_SECRET_ACCESS_KEY=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 \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
Remove AWS credential (IAM user have already been deleted from AWS)# AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=acb \
# AWS_SECRET_ACCESS_KEY=xyz \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
<commit_before># AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=AKIAJOVZ2DVGJKZAOJSQ \
# AWS_SECRET_ACCESS_KEY=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 \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
<commit_msg>Remove AWS credential (IAM user have already been deleted from AWS)<commit_after># AWS_ACCESS_KEY_ID - AWS access key.
# AWS_SECRET_ACCESS_KEY - AWS secret key. Access and secret key variables override credentials stored in credential and config files.
# AWS_DEFAULT_REGION - AWS region. This variable overrides the default region of the in-use profile, if set.
#
# env AWS_ACCESS_KEY_ID=acb \
# AWS_SECRET_ACCESS_KEY=xyz \
# AWS_DEFAULT_REGION=ap-southeast-2 \
# python test_iam_credentials_envar.py
import boto3
s3 = boto3.resource('s3')
for bucket in s3.buckets.all():
print(bucket.name)
|
7fab2f02ddea20a790c4e6065b38229776c6b763
|
spam/tests/test_preprocess.py
|
spam/tests/test_preprocess.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
|
Add empty tests with descriptions.
|
Add empty tests with descriptions.
|
Python
|
mit
|
benigls/spam,benigls/spam
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
Add empty tests with descriptions.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
<commit_msg>Add empty tests with descriptions.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
Add empty tests with descriptions.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
<commit_msg>Add empty tests with descriptions.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
|
95a72d1f06c740b933983c2446b36bb450c4730e
|
ona_migration_script/migrate_toilet_codes.py
|
ona_migration_script/migrate_toilet_codes.py
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
parser.add_argument(
'--dryrun', '-d', action='store_true',
help='Print out changes instead of uploading them.')
args = parser.parse_args()
|
Add dryrun command line argument
|
Add dryrun command line argument
|
Python
|
bsd-3-clause
|
praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
Add dryrun command line argument
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
parser.add_argument(
'--dryrun', '-d', action='store_true',
help='Print out changes instead of uploading them.')
args = parser.parse_args()
|
<commit_before>import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
<commit_msg>Add dryrun command line argument<commit_after>
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
parser.add_argument(
'--dryrun', '-d', action='store_true',
help='Print out changes instead of uploading them.')
args = parser.parse_args()
|
import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
Add dryrun command line argumentimport argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
parser.add_argument(
'--dryrun', '-d', action='store_true',
help='Print out changes instead of uploading them.')
args = parser.parse_args()
|
<commit_before>import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
<commit_msg>Add dryrun command line argument<commit_after>import argparse
parser = argparse.ArgumentParser(description='Migrate toilet codes')
parser.add_argument(
'url', type=str,
help='The base URL for the django toilet database')
parser.add_argument(
'username', type=str, help='The username used to log in')
parser.add_argument(
'password', type=str, help='The password used to log in')
parser.add_argument(
'--dryrun', '-d', action='store_true',
help='Print out changes instead of uploading them.')
args = parser.parse_args()
|
cdb3a6f1a467c317817818a7df921dc168cacb4c
|
astropy/time/setup_package.py
|
astropy/time/setup_package.py
|
import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
|
import os
from distutils.extension import Extension
from astropy import setup_helpers
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
sources = [os.path.join(TIMEROOT, "sofa_time.pyx")]
include_dirs = ['numpy']
libraries = []
if setup_helpers.use_system_library('sofa'):
libraries.append('sofa_c')
else:
sources.append("cextern/sofa/sofa.c")
include_dirs.append('cextern/sofa')
time_ext = Extension(
name="astropy.time.sofa_time",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",)
return [time_ext]
def get_external_libraries():
return ['sofa']
|
Update astropy.time setup to allow using system sofa_c library
|
Update astropy.time setup to allow using system sofa_c library
|
Python
|
bsd-3-clause
|
lpsinger/astropy,saimn/astropy,tbabej/astropy,stargaser/astropy,lpsinger/astropy,DougBurke/astropy,StuartLittlefair/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,funbaker/astropy,pllim/astropy,kelle/astropy,mhvk/astropy,mhvk/astropy,kelle/astropy,tbabej/astropy,larrybradley/astropy,larrybradley/astropy,stargaser/astropy,lpsinger/astropy,pllim/astropy,aleksandr-bakanov/astropy,saimn/astropy,joergdietrich/astropy,funbaker/astropy,DougBurke/astropy,tbabej/astropy,AustereCuriosity/astropy,dhomeier/astropy,mhvk/astropy,dhomeier/astropy,DougBurke/astropy,mhvk/astropy,lpsinger/astropy,joergdietrich/astropy,pllim/astropy,MSeifert04/astropy,AustereCuriosity/astropy,dhomeier/astropy,tbabej/astropy,bsipocz/astropy,AustereCuriosity/astropy,bsipocz/astropy,kelle/astropy,larrybradley/astropy,dhomeier/astropy,joergdietrich/astropy,joergdietrich/astropy,mhvk/astropy,astropy/astropy,kelle/astropy,StuartLittlefair/astropy,astropy/astropy,pllim/astropy,StuartLittlefair/astropy,saimn/astropy,stargaser/astropy,funbaker/astropy,astropy/astropy,larrybradley/astropy,AustereCuriosity/astropy,astropy/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,MSeifert04/astropy,dhomeier/astropy,funbaker/astropy,MSeifert04/astropy,stargaser/astropy,AustereCuriosity/astropy,saimn/astropy,bsipocz/astropy,bsipocz/astropy,tbabej/astropy,StuartLittlefair/astropy,kelle/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,pllim/astropy,astropy/astropy,lpsinger/astropy,DougBurke/astropy,MSeifert04/astropy,saimn/astropy
|
import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
Update astropy.time setup to allow using system sofa_c library
|
import os
from distutils.extension import Extension
from astropy import setup_helpers
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
sources = [os.path.join(TIMEROOT, "sofa_time.pyx")]
include_dirs = ['numpy']
libraries = []
if setup_helpers.use_system_library('sofa'):
libraries.append('sofa_c')
else:
sources.append("cextern/sofa/sofa.c")
include_dirs.append('cextern/sofa')
time_ext = Extension(
name="astropy.time.sofa_time",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",)
return [time_ext]
def get_external_libraries():
return ['sofa']
|
<commit_before>import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
<commit_msg>Update astropy.time setup to allow using system sofa_c library<commit_after>
|
import os
from distutils.extension import Extension
from astropy import setup_helpers
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
sources = [os.path.join(TIMEROOT, "sofa_time.pyx")]
include_dirs = ['numpy']
libraries = []
if setup_helpers.use_system_library('sofa'):
libraries.append('sofa_c')
else:
sources.append("cextern/sofa/sofa.c")
include_dirs.append('cextern/sofa')
time_ext = Extension(
name="astropy.time.sofa_time",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",)
return [time_ext]
def get_external_libraries():
return ['sofa']
|
import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
Update astropy.time setup to allow using system sofa_c libraryimport os
from distutils.extension import Extension
from astropy import setup_helpers
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
sources = [os.path.join(TIMEROOT, "sofa_time.pyx")]
include_dirs = ['numpy']
libraries = []
if setup_helpers.use_system_library('sofa'):
libraries.append('sofa_c')
else:
sources.append("cextern/sofa/sofa.c")
include_dirs.append('cextern/sofa')
time_ext = Extension(
name="astropy.time.sofa_time",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",)
return [time_ext]
def get_external_libraries():
return ['sofa']
|
<commit_before>import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
<commit_msg>Update astropy.time setup to allow using system sofa_c library<commit_after>import os
from distutils.extension import Extension
from astropy import setup_helpers
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
sources = [os.path.join(TIMEROOT, "sofa_time.pyx")]
include_dirs = ['numpy']
libraries = []
if setup_helpers.use_system_library('sofa'):
libraries.append('sofa_c')
else:
sources.append("cextern/sofa/sofa.c")
include_dirs.append('cextern/sofa')
time_ext = Extension(
name="astropy.time.sofa_time",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",)
return [time_ext]
def get_external_libraries():
return ['sofa']
|
bb02ac7340fc939e1b2527cc079985a2eb021b3a
|
project/settings_prod.py
|
project/settings_prod.py
|
from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
Debug in prod for a moment
|
Debug in prod for a moment
|
Python
|
mit
|
AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django
|
from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}Debug in prod for a moment
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
<commit_before>from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}<commit_msg>Debug in prod for a moment<commit_after>
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}Debug in prod for a momentfrom project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
<commit_before>from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}<commit_msg>Debug in prod for a moment<commit_after>from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
# Google cloud for static files
STATIC_URL = 'http://commondatastorage.googleapis.com/lobbyingph/'
# AWS s3 for static files
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/''
#AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
#AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
#AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
a3f5e1338cc84c60b867fc04175253f7ab460912
|
relay_api/api/backend.py
|
relay_api/api/backend.py
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict, indent=4)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict, indent=4)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
Add indent in json to improve debugging
|
Add indent in json to improve debugging
|
Python
|
mit
|
pahumadad/raspi-relay-api
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
Add indent in json to improve debugging
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict, indent=4)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict, indent=4)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
<commit_before>import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
<commit_msg>Add indent in json to improve debugging<commit_after>
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict, indent=4)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict, indent=4)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
Add indent in json to improve debuggingimport json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict, indent=4)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict, indent=4)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
<commit_before>import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
<commit_msg>Add indent in json to improve debugging<commit_after>import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
for r in relays:
relays[r]["object"] = relay(relays[r]["gpio"])
relays[r]["state"] = relays[r]["object"].get_state()
def get_all_relays():
relays_dict = __get_relay_dict()
return json.dumps(relays_dict, indent=4)
def get_relay(relay_name):
if relay_name not in relays:
return None
relay_dict = __get_relay_dict(relay_name)
return json.dumps(relay_dict, indent=4)
def __get_relay_dict(relay_name=None):
if relay_name:
relay_dict = dict.copy(relays["relay_name"])
del(relay_dict["object"])
return relay_dict
relays_dict = dict.copy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
ffc7cd05ce824b8ec0aeee4e8f428a1c93710b08
|
db/db.py
|
db/db.py
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir,username), password)
def save():
aesjsonfile.dump("%s/%s.json"%(config.dbdir,username), self.db, password)
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir, self.username), self.password)
def save(self):
aesjsonfile.dump("%s/%s.json"%(config.dbdir, self.username), self.db, self.password)
def accountstodo(self):
return self.db["accounts"]
|
Fix bugs, add accountstodo method.
|
Fix bugs, add accountstodo method.
|
Python
|
agpl-3.0
|
vincebusam/pyWebCash,vincebusam/pyWebCash,vincebusam/pyWebCash
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir,username), password)
def save():
aesjsonfile.dump("%s/%s.json"%(config.dbdir,username), self.db, password)
Fix bugs, add accountstodo method.
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir, self.username), self.password)
def save(self):
aesjsonfile.dump("%s/%s.json"%(config.dbdir, self.username), self.db, self.password)
def accountstodo(self):
return self.db["accounts"]
|
<commit_before>import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir,username), password)
def save():
aesjsonfile.dump("%s/%s.json"%(config.dbdir,username), self.db, password)
<commit_msg>Fix bugs, add accountstodo method.<commit_after>
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir, self.username), self.password)
def save(self):
aesjsonfile.dump("%s/%s.json"%(config.dbdir, self.username), self.db, self.password)
def accountstodo(self):
return self.db["accounts"]
|
import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir,username), password)
def save():
aesjsonfile.dump("%s/%s.json"%(config.dbdir,username), self.db, password)
Fix bugs, add accountstodo method.import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir, self.username), self.password)
def save(self):
aesjsonfile.dump("%s/%s.json"%(config.dbdir, self.username), self.db, self.password)
def accountstodo(self):
return self.db["accounts"]
|
<commit_before>import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir,username), password)
def save():
aesjsonfile.dump("%s/%s.json"%(config.dbdir,username), self.db, password)
<commit_msg>Fix bugs, add accountstodo method.<commit_after>import sys
import aesjsonfile
sys.path.append("../")
import config
class DB(object):
def __init__(self, username, password):
self.username = username
self.password = password
self.db = aesjsonfile.load("%s/%s.json"%(config.dbdir, self.username), self.password)
def save(self):
aesjsonfile.dump("%s/%s.json"%(config.dbdir, self.username), self.db, self.password)
def accountstodo(self):
return self.db["accounts"]
|
2d98e2b738ffed183e8b5ec2e4e17753e6cf60c9
|
test/skills/scheduled_skills.py
|
test/skills/scheduled_skills.py
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
Correct test criteria for time format for scheduled skill.
|
Correct test criteria for time format for scheduled skill.
Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.
|
Python
|
apache-2.0
|
forslund/mycroft-core,aatchison/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core,aatchison/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,linuxipho/mycroft-core,MycroftAI/mycroft-core,Dark5ide/mycroft-core
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
Correct test criteria for time format for scheduled skill.
Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
<commit_before>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
<commit_msg>Correct test criteria for time format for scheduled skill.
Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.<commit_after>
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
Correct test criteria for time format for scheduled skill.
Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
<commit_before>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
<commit_msg>Correct test criteria for time format for scheduled skill.
Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.<commit_after>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
c908c943f66468f91cb8abb450bca36ead731885
|
test_app.py
|
test_app.py
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
Add test for signup success
|
Add test for signup success
|
Python
|
mit
|
mkiterian/bucket-list-app,mkiterian/bucket-list-app,mkiterian/bucket-list-app
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
Add test for signup success
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for signup success<commit_after>
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
Add test for signup successimport unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for signup success<commit_after>import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
71cc3cf500a9db7a96aa5f1a6c19c387cf0ad4ec
|
fickle/backend.py
|
fickle/backend.py
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2, random_state = 0):
if not self.loaded():
return
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = random_state
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.random_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2):
if not self.loaded():
return
self.random_id += 1
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = self.random_id
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
Validate with sequential random state
|
Validate with sequential random state
|
Python
|
mit
|
norbert/fickle
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2, random_state = 0):
if not self.loaded():
return
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = random_state
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
Validate with sequential random state
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.random_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2):
if not self.loaded():
return
self.random_id += 1
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = self.random_id
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
<commit_before>import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2, random_state = 0):
if not self.loaded():
return
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = random_state
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
<commit_msg>Validate with sequential random state<commit_after>
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.random_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2):
if not self.loaded():
return
self.random_id += 1
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = self.random_id
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2, random_state = 0):
if not self.loaded():
return
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = random_state
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
Validate with sequential random stateimport sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.random_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2):
if not self.loaded():
return
self.random_id += 1
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = self.random_id
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
<commit_before>import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2, random_state = 0):
if not self.loaded():
return
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = random_state
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
<commit_msg>Validate with sequential random state<commit_after>import sklearn.cross_validation
class Backend(object):
def __init__(self):
self.dataset_id = 0
self.random_id = 0
self.dataset = None
self.model = None
def load(self, dataset):
self.model = None
self.dataset_id += 1
self.dataset = dataset
self._data = dataset['data']
self._target = dataset['target']
return True
def loaded(self):
return (self.dataset is not None)
def fit(self):
if not self.loaded():
return False
model = self.classifier()
model.fit(self._data, self._target)
self.model = model
return True
def trained(self):
return (self.model is not None)
def validate(self, test_size = 0.2):
if not self.loaded():
return
self.random_id += 1
model = self.classifier()
X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split(
self._data, self._target, test_size = test_size, random_state = self.random_id
)
model.fit(X_train, y_train)
return [model.score(X_test, y_test)]
def predict(self, value):
return self.model.predict(value)
|
06dc2190d64e312b3b8285e69a0d50342bc55b46
|
tests/integration/test_proxy.py
|
tests/integration/test_proxy.py
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
Fix format string for Python 2.6
|
Fix format string for Python 2.6
|
Python
|
mit
|
kevin1024/vcrpy,graingert/vcrpy,kevin1024/vcrpy,graingert/vcrpy
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
Fix format string for Python 2.6
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
<commit_before># -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
<commit_msg>Fix format string for Python 2.6<commit_after>
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
Fix format string for Python 2.6# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
<commit_before># -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
<commit_msg>Fix format string for Python 2.6<commit_after># -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
fce1b1bdb5a39bbe57b750cd453a9697b8447d6b
|
chat.py
|
chat.py
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
Correct position of comment :)
|
Correct position of comment :)
|
Python
|
bsd-3-clause
|
arturosevilla/notification-server-example,arturosevilla/notification-server-example
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
Correct position of comment :)
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
<commit_before>import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
<commit_msg>Correct position of comment :)<commit_after>
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
Correct position of comment :)import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
<commit_before>import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
<commit_msg>Correct position of comment :)<commit_after>import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
|
2c90b0ca03c79cbba476897b8a2068e99cc6b2b1
|
restaurant/urls.py
|
restaurant/urls.py
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^account/login/$', auth_views.login, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset, name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
]
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.conf.urls import url
from views.index_view import index
from views.login_view import login_view
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^index/', index, name='index'),
url(r'^account/login/$', login_view, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset,
name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done,
name='password_reset_done'),
]
|
Change view for login url. Add url for index page
|
Change view for login url. Add url for index page
|
Python
|
mit
|
Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^account/login/$', auth_views.login, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset, name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
]
Change view for login url. Add url for index page
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.conf.urls import url
from views.index_view import index
from views.login_view import login_view
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^index/', index, name='index'),
url(r'^account/login/$', login_view, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset,
name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done,
name='password_reset_done'),
]
|
<commit_before>"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^account/login/$', auth_views.login, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset, name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
]
<commit_msg>Change view for login url. Add url for index page<commit_after>
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.conf.urls import url
from views.index_view import index
from views.login_view import login_view
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^index/', index, name='index'),
url(r'^account/login/$', login_view, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset,
name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done,
name='password_reset_done'),
]
|
"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^account/login/$', auth_views.login, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset, name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
]
Change view for login url. Add url for index page"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.conf.urls import url
from views.index_view import index
from views.login_view import login_view
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^index/', index, name='index'),
url(r'^account/login/$', login_view, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset,
name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done,
name='password_reset_done'),
]
|
<commit_before>"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^account/login/$', auth_views.login, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset, name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
]
<commit_msg>Change view for login url. Add url for index page<commit_after>"""restaurant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.conf.urls import url
from views.index_view import index
from views.login_view import login_view
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^index/', index, name='index'),
url(r'^account/login/$', login_view, name='login'),
url(r'^account/password_reset/$', auth_views.password_reset,
name='password_reset'),
url(r'^account/password_reset/done/$', auth_views.password_reset_done,
name='password_reset_done'),
]
|
f6a382a9a52ef2321c18ba63a2ece6930dadcf62
|
src/pybel/manager/__init__.py
|
src/pybel/manager/__init__.py
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
Add citation utils to init
|
Add citation utils to init
|
Python
|
mit
|
pybel/pybel,pybel/pybel,pybel/pybel
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
Add citation utils to init
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
<commit_before># -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
<commit_msg>Add citation utils to init<commit_after>
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
Add citation utils to init# -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
<commit_before># -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
<commit_msg>Add citation utils to init<commit_after># -*- coding: utf-8 -*-
"""
The :mod:`pybel.manager` module serves as an interface between the BEL graph data structure and underlying relational
databases. Its inclusion allows for the caching of namespaces and annotations for much faster lookup than
downloading and parsing upon each compilation.
"""
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
1c9b0185b98d1bfe06fb7bd565d255a1b4f23f96
|
test_output.py
|
test_output.py
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
Add a test for the tidy-url command
|
Add a test for the tidy-url command
|
Python
|
mit
|
alexwlchan/safari.rs,alexwlchan/safari.rs
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
if __name__ == '__main__':
unittest.main()
Add a test for the tidy-url command
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a test for the tidy-url command<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
if __name__ == '__main__':
unittest.main()
Add a test for the tidy-url command#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a test for the tidy-url command<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8
"""
These are tests of the external behaviour -- feature tests, if you like.
They run the compiled binaries, and make assertions about the return code,
stdout and stderr.
"""
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
f9fbb8331d6dc91773f686c57d41128edc6b80f9
|
f5_openstack_agent/lbaasv2/drivers/bigip/test/test__common_service_handler.py
|
f5_openstack_agent/lbaasv2/drivers/bigip/test/test__common_service_handler.py
|
import copy
import json
import mock
import os
from pprint import pprint as pp
import pytest
from pytest import symbols
import requests
from oslo_config import cfg
from f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver import\
iControlDriver
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
#pp(CREATE)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
original_create = copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
import copy
import json
import os
from pprint import pprint as pp
from pytest import symbols
import requests
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
Fix flake8 violations in test
|
Fix flake8 violations in test
|
Python
|
apache-2.0
|
F5Networks/f5-openstack-agent,richbrowne/f5-openstack-agent,richbrowne/f5-openstack-agent,richbrowne/f5-openstack-agent,F5Networks/f5-openstack-agent,F5Networks/f5-openstack-agent
|
import copy
import json
import mock
import os
from pprint import pprint as pp
import pytest
from pytest import symbols
import requests
from oslo_config import cfg
from f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver import\
iControlDriver
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
#pp(CREATE)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
original_create = copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
Fix flake8 violations in test
|
import copy
import json
import os
from pprint import pprint as pp
from pytest import symbols
import requests
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
<commit_before>import copy
import json
import mock
import os
from pprint import pprint as pp
import pytest
from pytest import symbols
import requests
from oslo_config import cfg
from f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver import\
iControlDriver
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
#pp(CREATE)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
original_create = copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
<commit_msg>Fix flake8 violations in test<commit_after>
|
import copy
import json
import os
from pprint import pprint as pp
from pytest import symbols
import requests
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
import copy
import json
import mock
import os
from pprint import pprint as pp
import pytest
from pytest import symbols
import requests
from oslo_config import cfg
from f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver import\
iControlDriver
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
#pp(CREATE)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
original_create = copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
Fix flake8 violations in testimport copy
import json
import os
from pprint import pprint as pp
from pytest import symbols
import requests
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
<commit_before>import copy
import json
import mock
import os
from pprint import pprint as pp
import pytest
from pytest import symbols
import requests
from oslo_config import cfg
from f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver import\
iControlDriver
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
#pp(CREATE)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
original_create = copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
<commit_msg>Fix flake8 violations in test<commit_after>import copy
import json
import os
from pprint import pprint as pp
from pytest import symbols
import requests
requests.packages.urllib3.disable_warnings()
opd = os.path.dirname
DISTRIBUTIONROOT = opd(opd(opd(opd(opd(opd(__file__))))))
del opd
SERVICELIBDIR = os.path.join(DISTRIBUTIONROOT,
'devtools',
'sample_data',
'service_library')
CREATE = json.load(open(os.path.join(SERVICELIBDIR, 'create.json'), 'r'))
def test__common_service_handler(bigip, neutronless_wrappedicontroldriver):
mgmt_rt = bigip
nless_wicd = neutronless_wrappedicontroldriver
print(type(symbols))
print(symbols.debug)
print(nless_wicd.hostnames)
partition_names = [x.name for x in mgmt_rt.tm.sys.folders.get_collection()]
pp(partition_names)
copy.deepcopy(CREATE)
nless_wicd._common_service_handler(CREATE)
try:
print('after _common_service_handler: %r' % CREATE['traffic_group'])
except KeyError:
pass
pp(nless_wicd.plugin_rpc.call_args_list)
|
bdb46e88fb9ee14b6c12d2b9aa5087cfe973492c
|
pontoon/base/__init__.py
|
pontoon/base/__init__.py
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
Make Mozilla Beta repositories special
|
Make Mozilla Beta repositories special
|
Python
|
bsd-3-clause
|
yfdyh000/pontoon,jotes/pontoon,participedia/pontoon,m8ttyB/pontoon,m8ttyB/pontoon,mastizada/pontoon,jotes/pontoon,mathjazz/pontoon,mozilla/pontoon,jotes/pontoon,yfdyh000/pontoon,mathjazz/pontoon,mastizada/pontoon,yfdyh000/pontoon,jotes/pontoon,sudheesh001/pontoon,yfdyh000/pontoon,mastizada/pontoon,sudheesh001/pontoon,mozilla/pontoon,mathjazz/pontoon,mathjazz/pontoon,sudheesh001/pontoon,participedia/pontoon,sudheesh001/pontoon,m8ttyB/pontoon,mathjazz/pontoon,mozilla/pontoon,mozilla/pontoon,participedia/pontoon,participedia/pontoon,mozilla/pontoon,mastizada/pontoon,m8ttyB/pontoon
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
Make Mozilla Beta repositories special
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
<commit_before>"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
<commit_msg>Make Mozilla Beta repositories special<commit_after>
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
Make Mozilla Beta repositories special"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
<commit_before>"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
<commit_msg>Make Mozilla Beta repositories special<commit_after>"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-beta/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
ed34dac136af052c849b35adacc7c95b2d82e00a
|
tests/test_content_type.py
|
tests/test_content_type.py
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert FormParser is negotiation.select_parser(
requestWithQueryParam, parsers)
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
parser = negotiation.select_parser(requestWithQueryParam, parsers)
assert parser.media_type == 'application/x-www-form-urlencoded'
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
Check media_type instead of class type
|
Check media_type instead of class type
The `parsers` list should contain instances, not classes.
|
Python
|
mit
|
hzdg/drf-url-content-type-override
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert FormParser is negotiation.select_parser(
requestWithQueryParam, parsers)
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
Check media_type instead of class type
The `parsers` list should contain instances, not classes.
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
parser = negotiation.select_parser(requestWithQueryParam, parsers)
assert parser.media_type == 'application/x-www-form-urlencoded'
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
<commit_before>import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert FormParser is negotiation.select_parser(
requestWithQueryParam, parsers)
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
<commit_msg>Check media_type instead of class type
The `parsers` list should contain instances, not classes.<commit_after>
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
parser = negotiation.select_parser(requestWithQueryParam, parsers)
assert parser.media_type == 'application/x-www-form-urlencoded'
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert FormParser is negotiation.select_parser(
requestWithQueryParam, parsers)
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
Check media_type instead of class type
The `parsers` list should contain instances, not classes.import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
parser = negotiation.select_parser(requestWithQueryParam, parsers)
assert parser.media_type == 'application/x-www-form-urlencoded'
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
<commit_before>import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert FormParser is negotiation.select_parser(
requestWithQueryParam, parsers)
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
<commit_msg>Check media_type instead of class type
The `parsers` list should contain instances, not classes.<commit_after>import pytest
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
factory = APIRequestFactory()
def test_content_type_override_query():
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
requestWithQueryParam = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/plain'))
parser = negotiation.select_parser(requestWithQueryParam, parsers)
assert parser.media_type == 'application/x-www-form-urlencoded'
requestWithoutQueryParam = Request(
factory.post('/', {'email': 'mmmmmm@test.com'},
content_type='text/plain'))
assert None is negotiation.select_parser(
requestWithoutQueryParam, parsers)
def test_limited_overrides():
"""
The content type shouldn't be overridden if the header is something other
than 'text/plain', or missing entirely.
"""
from rest_url_override_content_negotiation import \
URLOverrideContentNegotiation
negotiation = URLOverrideContentNegotiation()
parsers = (JSONParser, FormParser, MultiPartParser)
req = Request(
factory.post('/?content_type=application/x-www-form-urlencoded',
{'email': 'mmmmmm@test.com'},
content_type='text/somethingelse'))
assert negotiation.select_parser(req, parsers) is None
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.