commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
83c68749910933cdb3a8be1a4fc2c50709f671a1 | admin/common_auth/forms.py | admin/common_auth/forms.py | from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| Add checkboxselectmultiple widget for admin form | Add checkboxselectmultiple widget for admin form
| Python | apache-2.0 | felliott/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,erinspace/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,icereval/osf.io,chennan47/osf.io,mfraezz/osf.io,adlius/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,icereval/osf.io,cslzchen/osf.io,saradbowman/osf.io,binoculars/osf.io,aaxelb/osf.io,cwisecarver/osf.io,felliott/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,chrisseto/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,chrisseto/osf.io,pattisdr/osf.io,mfraezz/osf.io,TomBaxter/osf.io,baylee-d/osf.io,chrisseto/osf.io,cslzchen/osf.io,cwisecarver/osf.io,aaxelb/osf.io,crcresearch/osf.io,cwisecarver/osf.io,caneruguz/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,caneruguz/osf.io,chrisseto/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,icereval/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,hmoco/osf.io,crcresearch/osf.io,leb2dg/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,mattclark/osf.io,hmoco/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,caneruguz/osf.io,leb2dg/osf.io,mfraezz/osf.io,sloria/osf.io,binoculars/osf.io,erinspace/osf.io,felliott/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,binoculars/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,adlius/osf.io,hmoco/osf.io,erinspace/osf.io,caseyrollins/osf.io,mattclark/osf.io,chennan47/osf.io,mfraezz/osf.io,chennan47/osf.io,sloria/osf.io | from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
Add checkboxselectmultiple widget for admin form | from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| <commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Add checkboxselectmultiple widget for admin form<commit_after> | from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
Add checkboxselectmultiple widget for admin formfrom __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| <commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Add checkboxselectmultiple widget for admin form<commit_after>from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
85c1a9e6dd9e4523d60638027da23fbfce7deff6 | stack/cluster.py | stack/cluster.py | from troposphere import (
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
| from troposphere import (
iam,
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
# ECS container role
container_instance_role = iam.Role(
"ContainerInstanceRole",
template=template,
AssumeRolePolicyDocument=dict(Statement=[dict(
Effect="Allow",
Principal=dict(Service=["ec2.amazonaws.com"]),
Action=["sts:AssumeRole"],
)]),
Path="/",
Policies=[
]
)
# ECS container instance profile
container_instance_profile = iam.InstanceProfile(
"ContainerInstanceProfile",
template=template,
Path="/",
Roles=[Ref(container_instance_role)],
)
| Add an instance profile for container instances | Add an instance profile for container instances
| Python | mit | caktus/aws-web-stacks,tobiasmcnulty/aws-container-basics | from troposphere import (
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
Add an instance profile for container instances | from troposphere import (
iam,
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
# ECS container role
container_instance_role = iam.Role(
"ContainerInstanceRole",
template=template,
AssumeRolePolicyDocument=dict(Statement=[dict(
Effect="Allow",
Principal=dict(Service=["ec2.amazonaws.com"]),
Action=["sts:AssumeRole"],
)]),
Path="/",
Policies=[
]
)
# ECS container instance profile
container_instance_profile = iam.InstanceProfile(
"ContainerInstanceProfile",
template=template,
Path="/",
Roles=[Ref(container_instance_role)],
)
| <commit_before>from troposphere import (
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
<commit_msg>Add an instance profile for container instances<commit_after> | from troposphere import (
iam,
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
# ECS container role
container_instance_role = iam.Role(
"ContainerInstanceRole",
template=template,
AssumeRolePolicyDocument=dict(Statement=[dict(
Effect="Allow",
Principal=dict(Service=["ec2.amazonaws.com"]),
Action=["sts:AssumeRole"],
)]),
Path="/",
Policies=[
]
)
# ECS container instance profile
container_instance_profile = iam.InstanceProfile(
"ContainerInstanceProfile",
template=template,
Path="/",
Roles=[Ref(container_instance_role)],
)
| from troposphere import (
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
Add an instance profile for container instancesfrom troposphere import (
iam,
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
# ECS container role
container_instance_role = iam.Role(
"ContainerInstanceRole",
template=template,
AssumeRolePolicyDocument=dict(Statement=[dict(
Effect="Allow",
Principal=dict(Service=["ec2.amazonaws.com"]),
Action=["sts:AssumeRole"],
)]),
Path="/",
Policies=[
]
)
# ECS container instance profile
container_instance_profile = iam.InstanceProfile(
"ContainerInstanceProfile",
template=template,
Path="/",
Roles=[Ref(container_instance_role)],
)
| <commit_before>from troposphere import (
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
<commit_msg>Add an instance profile for container instances<commit_after>from troposphere import (
iam,
Parameter,
Ref,
)
from troposphere.ecs import (
Cluster,
)
from .template import template
container_instance_type = Ref(template.add_parameter(Parameter(
"ContainerInstanceType",
Description="The container instance type",
Type="String",
Default="t2.micro",
AllowedValues=["t2.micro", "t2.small", "t2.medium"]
)))
template.add_mapping("ECSRegionMap", {
"eu-west-1": {"AMI": "ami-4e6ffe3d"},
"us-east-1": {"AMI": "ami-8f7687e2"},
"us-west-2": {"AMI": "ami-84b44de4"},
})
# ECS cluster
cluster = Cluster(
"Cluster",
template=template,
)
# ECS container role
container_instance_role = iam.Role(
"ContainerInstanceRole",
template=template,
AssumeRolePolicyDocument=dict(Statement=[dict(
Effect="Allow",
Principal=dict(Service=["ec2.amazonaws.com"]),
Action=["sts:AssumeRole"],
)]),
Path="/",
Policies=[
]
)
# ECS container instance profile
container_instance_profile = iam.InstanceProfile(
"ContainerInstanceProfile",
template=template,
Path="/",
Roles=[Ref(container_instance_role)],
)
|
b6742ef3f8d1888e46938b2c678bfb093b7a31f2 | pymortgage/d3_schedule.py | pymortgage/d3_schedule.py | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
if by_year:
d3_data.insert(0, self.add_year_key("balance"))
d3_data.insert(1, self.add_year_key("principal"))
d3_data.insert(2, self.add_year_key("interest"))
d3_data.insert(3, self.add_year_key("amount"))
else:
d3_data.insert(0, self.add_month_key("balance"))
d3_data.insert(1, self.add_month_key("principal"))
d3_data.insert(2, self.add_month_key("interest"))
d3_data.insert(3, self.add_month_key("amount"))
return json.dumps(d3_data)
def add_month_key(self, key):
return self.add_key(key, 'month')
def add_year_key(self, key):
return self.add_key(key, 'year')
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
keys = ['balance', 'principal', 'interest', 'amount']
if by_year:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'year'))
else:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'month'))
return json.dumps(d3_data)
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set | Put some recurring things into a loop to simply code. | Put some recurring things into a loop to simply code.
| Python | apache-2.0 | csutherl/pymortgage,csutherl/pymortgage,csutherl/pymortgage | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
if by_year:
d3_data.insert(0, self.add_year_key("balance"))
d3_data.insert(1, self.add_year_key("principal"))
d3_data.insert(2, self.add_year_key("interest"))
d3_data.insert(3, self.add_year_key("amount"))
else:
d3_data.insert(0, self.add_month_key("balance"))
d3_data.insert(1, self.add_month_key("principal"))
d3_data.insert(2, self.add_month_key("interest"))
d3_data.insert(3, self.add_month_key("amount"))
return json.dumps(d3_data)
def add_month_key(self, key):
return self.add_key(key, 'month')
def add_year_key(self, key):
return self.add_key(key, 'year')
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_setPut some recurring things into a loop to simply code. | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
keys = ['balance', 'principal', 'interest', 'amount']
if by_year:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'year'))
else:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'month'))
return json.dumps(d3_data)
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set | <commit_before>import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
if by_year:
d3_data.insert(0, self.add_year_key("balance"))
d3_data.insert(1, self.add_year_key("principal"))
d3_data.insert(2, self.add_year_key("interest"))
d3_data.insert(3, self.add_year_key("amount"))
else:
d3_data.insert(0, self.add_month_key("balance"))
d3_data.insert(1, self.add_month_key("principal"))
d3_data.insert(2, self.add_month_key("interest"))
d3_data.insert(3, self.add_month_key("amount"))
return json.dumps(d3_data)
def add_month_key(self, key):
return self.add_key(key, 'month')
def add_year_key(self, key):
return self.add_key(key, 'year')
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set<commit_msg>Put some recurring things into a loop to simply code.<commit_after> | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
keys = ['balance', 'principal', 'interest', 'amount']
if by_year:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'year'))
else:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'month'))
return json.dumps(d3_data)
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set | import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
if by_year:
d3_data.insert(0, self.add_year_key("balance"))
d3_data.insert(1, self.add_year_key("principal"))
d3_data.insert(2, self.add_year_key("interest"))
d3_data.insert(3, self.add_year_key("amount"))
else:
d3_data.insert(0, self.add_month_key("balance"))
d3_data.insert(1, self.add_month_key("principal"))
d3_data.insert(2, self.add_month_key("interest"))
d3_data.insert(3, self.add_month_key("amount"))
return json.dumps(d3_data)
def add_month_key(self, key):
return self.add_key(key, 'month')
def add_year_key(self, key):
return self.add_key(key, 'year')
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_setPut some recurring things into a loop to simply code.import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
keys = ['balance', 'principal', 'interest', 'amount']
if by_year:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'year'))
else:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'month'))
return json.dumps(d3_data)
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set | <commit_before>import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
if by_year:
d3_data.insert(0, self.add_year_key("balance"))
d3_data.insert(1, self.add_year_key("principal"))
d3_data.insert(2, self.add_year_key("interest"))
d3_data.insert(3, self.add_year_key("amount"))
else:
d3_data.insert(0, self.add_month_key("balance"))
d3_data.insert(1, self.add_month_key("principal"))
d3_data.insert(2, self.add_month_key("interest"))
d3_data.insert(3, self.add_month_key("amount"))
return json.dumps(d3_data)
def add_month_key(self, key):
return self.add_key(key, 'month')
def add_year_key(self, key):
return self.add_key(key, 'year')
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set<commit_msg>Put some recurring things into a loop to simply code.<commit_after>import json
class D3_Schedule:
def __init__(self, schedule):
self.schedule = schedule
def get_d3_schedule(self, by_year=None):
d3_data = []
keys = ['balance', 'principal', 'interest', 'amount']
if by_year:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'year'))
else:
for i in range(len(keys)):
d3_data.insert(i, self.add_key(keys[i], 'month'))
return json.dumps(d3_data)
# color would be added to the new set for each key
def add_key(self, key, term):
new_set = dict()
new_set['key'] = key.capitalize()
new_set['values'] = []
for item in self.schedule:
new_set['values'].append([item[term], item[key]])
return new_set |
cda63e96b042de04b3aa12348a411229e9b9d973 | tools/glidein_cat.py | tools/glidein_cat.py | #!/bin/env python
#
# glidein_cat
#
# Execute a cat command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
| #!/bin/env python
#
# glidein_cat.py
#
# Description:
# Execute a cat command on a condor job working directory
#
# Usage:
# glidein_cat.py <cluster>.<process> [<file>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
| Change rel paths into abspaths and use helper module | Change rel paths into abspaths and use helper module
| Python | bsd-3-clause | bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS | #!/bin/env python
#
# glidein_cat
#
# Execute a cat command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
Change rel paths into abspaths and use helper module | #!/bin/env python
#
# glidein_cat.py
#
# Description:
# Execute a cat command on a condor job working directory
#
# Usage:
# glidein_cat.py <cluster>.<process> [<file>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
| <commit_before>#!/bin/env python
#
# glidein_cat
#
# Execute a cat command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
<commit_msg>Change rel paths into abspaths and use helper module<commit_after> | #!/bin/env python
#
# glidein_cat.py
#
# Description:
# Execute a cat command on a condor job working directory
#
# Usage:
# glidein_cat.py <cluster>.<process> [<file>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
| #!/bin/env python
#
# glidein_cat
#
# Execute a cat command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
Change rel paths into abspaths and use helper module#!/bin/env python
#
# glidein_cat.py
#
# Description:
# Execute a cat command on a condor job working directory
#
# Usage:
# glidein_cat.py <cluster>.<process> [<file>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
| <commit_before>#!/bin/env python
#
# glidein_cat
#
# Execute a cat command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
<commit_msg>Change rel paths into abspaths and use helper module<commit_after>#!/bin/env python
#
# glidein_cat.py
#
# Description:
# Execute a cat command on a condor job working directory
#
# Usage:
# glidein_cat.py <cluster>.<process> [<file>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
|
c8f3b93b763189a3f420b2d91dd9fec3ba96b300 | catalog/serializers.py | catalog/serializers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', )
| Remove category slug from the API | Remove category slug from the API
| Python | agpl-3.0 | UrLab/beta402,UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/DocHub,UrLab/DocHub | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
Remove category slug from the API | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', )
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
<commit_msg>Remove category slug from the API<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', )
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
Remove category slug from the API# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', )
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
<commit_msg>Remove category slug from the API<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import serializers
from catalog.models import Course, Category
from documents.serializers import DocumentSerializer
from telepathy.serializers import SmallThreadSerializer
class CourseSerializer(serializers.HyperlinkedModelSerializer):
document_set = DocumentSerializer(many=True)
thread_set = SmallThreadSerializer(many=True)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'name', )
|
5c28e34a795f3dfd8eebdbeb2509525ce4195bba | subversion/bindings/swig/python/tests/core.py | subversion/bindings/swig/python/tests/core.py | import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| Add a regression test for the bug fixed in r28485. | Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields.
| Python | apache-2.0 | jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion | import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields. | import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| <commit_before>import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
<commit_msg>Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields.<commit_after> | import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields.import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| <commit_before>import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
<commit_msg>Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields.<commit_after>import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
|
e53c1d6b592784cf0d94f31aa798e7a4563a9164 | app/soc/views/helper/decorators.py | app/soc/views/helper/decorators.py | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | Remove not needed request argument in view decorator. | Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826
| Python | apache-2.0 | SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapperRemove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826 | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | <commit_before>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper<commit_msg>Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826<commit_after> | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapperRemove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | <commit_before>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper<commit_msg>Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826<commit_after>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper |
3272a507219b5ca8c3a498acd66db33432458767 | app/soc/views/helper/decorators.py | app/soc/views/helper/decorators.py | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | Remove not needed request argument in view decorator. | Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826
| Python | apache-2.0 | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapperRemove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826 | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | <commit_before>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper<commit_msg>Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826<commit_after> | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapperRemove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper | <commit_before>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(request, *args, **kwds):
try:
return func(request, *args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper<commit_msg>Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
--HG--
extra : convert_revision : svn%3A32761e7d-7263-4528-b7be-7235b26367ec/trunk%40826<commit_after>#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
"""Decorator that insists that exceptions are handled by view."""
@wraps(func)
def view_wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except DeadlineExceededError:
logging.exception('DeadlineExceededError')
return http.HttpResponse('DeadlineExceededError')
except MemoryError:
logging.exception('MemoryError')
return http.HttpResponse('MemoryError')
except AssertionError:
logging.exception('AssertionError')
return http.HttpResponse('AssertionError')
return view_wrapper |
136f3725f4f90bef566ad43b740b341f69236bc5 | tools/snippets/test/fixtures/python/runner.py | tools/snippets/test/fixtures/python/runner.py | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
| Fix variable name in Python snippet | Fix variable name in Python snippet
| Python | apache-2.0 | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
Fix variable name in Python snippet | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
<commit_msg>Fix variable name in Python snippet<commit_after> | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
Fix variable name in Python snippet#!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
<commit_msg>Fix variable name in Python snippet<commit_after>#!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import math as m
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
# TODO: generate fixtures
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
gen(x, "TODO")
if __name__ == "__main__":
main()
|
2489ac6ce5a0229bbcee6e888f192eeca284106c | thinglang/parser/tokens/arithmetic.py | thinglang/parser/tokens/arithmetic.py | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.arguments = [slice[0], slice[2]]
self.operator = slice[1]
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self[0].evaluate(stack), self[1].evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] | Use argument list instead of lhs/rhs pari in ArithmeticOperation | Use argument list instead of lhs/rhs pari in ArithmeticOperation
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]Use argument list instead of lhs/rhs pari in ArithmeticOperation | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.arguments = [slice[0], slice[2]]
self.operator = slice[1]
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self[0].evaluate(stack), self[1].evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] | <commit_before>from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]<commit_msg>Use argument list instead of lhs/rhs pari in ArithmeticOperation<commit_after> | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.arguments = [slice[0], slice[2]]
self.operator = slice[1]
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self[0].evaluate(stack), self[1].evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] | from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]Use argument list instead of lhs/rhs pari in ArithmeticOperationfrom thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.arguments = [slice[0], slice[2]]
self.operator = slice[1]
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self[0].evaluate(stack), self[1].evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] | <commit_before>from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]<commit_msg>Use argument list instead of lhs/rhs pari in ArithmeticOperation<commit_after>from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.arguments = [slice[0], slice[2]]
self.operator = slice[1]
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self[0].evaluate(stack), self[1].evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item] |
6a007316bd3c7576e83076bee5d3236a1891a512 | messente/api/sms/api/__init__.py | messente/api/sms/api/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
| # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
__all__ = [
"error",
"Response",
"sms",
"credit",
"delivery",
"pricing",
"number_verification",
"verification_widget",
]
| Declare public interface for api.sms.api module | Declare public interface for api.sms.api module
| Python | apache-2.0 | messente/messente-python | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
Declare public interface for api.sms.api module | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
__all__ = [
"error",
"Response",
"sms",
"credit",
"delivery",
"pricing",
"number_verification",
"verification_widget",
]
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
<commit_msg>Declare public interface for api.sms.api module<commit_after> | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
__all__ = [
"error",
"Response",
"sms",
"credit",
"delivery",
"pricing",
"number_verification",
"verification_widget",
]
| # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
Declare public interface for api.sms.api module# -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
__all__ = [
"error",
"Response",
"sms",
"credit",
"delivery",
"pricing",
"number_verification",
"verification_widget",
]
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
<commit_msg>Declare public interface for api.sms.api module<commit_after># -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from messente.api.sms.api import error
from messente.api.sms.api.response import Response
# api modules
from messente.api.sms.api import sms
from messente.api.sms.api import credit
from messente.api.sms.api import delivery
from messente.api.sms.api import pricing
from messente.api.sms.api import number_verification
from messente.api.sms.api import verification_widget
__all__ = [
"error",
"Response",
"sms",
"credit",
"delivery",
"pricing",
"number_verification",
"verification_widget",
]
|
167108f64a7c9e8c910b9b0991ab8489f8e90866 | morse_modem.py | morse_modem.py | import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
print detect_tone(data)
print element_resolve(*detect_tone(data))
| import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
#print detect_tone(data)
element_resolve(*detect_tone(data))
| Remove extraneous print, comment out naked detect_tone call | Remove extraneous print, comment out naked detect_tone call
| Python | mit | nickodell/morse-code | import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
print detect_tone(data)
print element_resolve(*detect_tone(data))
Remove extraneous print, comment out naked detect_tone call | import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
#print detect_tone(data)
element_resolve(*detect_tone(data))
| <commit_before>import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
print detect_tone(data)
print element_resolve(*detect_tone(data))
<commit_msg>Remove extraneous print, comment out naked detect_tone call<commit_after> | import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
#print detect_tone(data)
element_resolve(*detect_tone(data))
| import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
print detect_tone(data)
print element_resolve(*detect_tone(data))
Remove extraneous print, comment out naked detect_tone callimport cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
#print detect_tone(data)
element_resolve(*detect_tone(data))
| <commit_before>import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
print detect_tone(data)
print element_resolve(*detect_tone(data))
<commit_msg>Remove extraneous print, comment out naked detect_tone call<commit_after>import cProfile
from cfg import *
from detect_tone import *
from gen_test import *
from element_resolve import *
if __name__ == "__main__":
#gen_test_data()
data = gen_test_data()
#print len(data)/SAMPLE_FREQ
#cProfile.run('detect_tone(data)')
#print detect_tone(data)
element_resolve(*detect_tone(data))
|
8ea4bb1acdc6df80620955c85c85fe71c74174d4 | highton/call_mixins/list_note_call_mixin.py | highton/call_mixins/list_note_call_mixin.py | from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'page': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
| from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'n': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
| Fix wrong pagination param in ListNoteCallMixin | Fix wrong pagination param in ListNoteCallMixin
see #17
| Python | apache-2.0 | seibert-media/Highton,seibert-media/Highton | from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'page': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
Fix wrong pagination param in ListNoteCallMixin
see #17 | from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'n': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
| <commit_before>from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'page': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
<commit_msg>Fix wrong pagination param in ListNoteCallMixin
see #17<commit_after> | from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'n': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
| from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'page': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
Fix wrong pagination param in ListNoteCallMixin
see #17from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'n': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
| <commit_before>from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'page': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
<commit_msg>Fix wrong pagination param in ListNoteCallMixin
see #17<commit_after>from highton.call_mixins import Call
from highton import fields
class ListNoteCallMixin(Call):
"""
A mixin to get all notes of inherited class
These could be: people || companies || kases || deals
"""
NOTES_OFFSET = 25
def list_notes(self, page=0, since=None):
"""
Get the notes of current object
:param page: the page starting at 0
:type since: int
:param since: get all notes since a datetime
:type since: datetime.datetime
:return: the notes
:rtype: list
"""
from highton.models.note import Note
params = {'n': int(page) * self.NOTES_OFFSET}
if since:
params['since'] = since.strftime(self.COLLECTION_DATETIME)
return fields.ListField(
name=self.ENDPOINT,
init_class=Note
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Note.ENDPOINT,
params=params
).text
)
)
|
92aa83091c6f32758fe99d703fbc77d7a640a222 | src/sentry/api/permissions.py | src/sentry/api/permissions.py | from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return object in Project.objects.get_for_user(user, access=access)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
| from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return any(1
for o in Project.objects.get_for_user(user, access=access)
if object == o
)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
| Use == for permission check | Use == for permission check
| Python | bsd-3-clause | BayanGroup/sentry,jean/sentry,llonchj/sentry,alexm92/sentry,zenefits/sentry,JackDanger/sentry,kevinastone/sentry,fuziontech/sentry,gencer/sentry,JackDanger/sentry,gg7/sentry,camilonova/sentry,gg7/sentry,llonchj/sentry,BayanGroup/sentry,boneyao/sentry,nicholasserra/sentry,ifduyue/sentry,fuziontech/sentry,mvaled/sentry,songyi199111/sentry,gencer/sentry,ngonzalvez/sentry,kevinastone/sentry,kevinlondon/sentry,imankulov/sentry,mvaled/sentry,TedaLIEz/sentry,fotinakis/sentry,boneyao/sentry,JamesMura/sentry,Kryz/sentry,hongliang5623/sentry,felixbuenemann/sentry,jokey2k/sentry,fuziontech/sentry,JamesMura/sentry,korealerts1/sentry,camilonova/sentry,1tush/sentry,jokey2k/sentry,fotinakis/sentry,vperron/sentry,Natim/sentry,daevaorn/sentry,mvaled/sentry,argonemyth/sentry,ewdurbin/sentry,ngonzalvez/sentry,BayanGroup/sentry,pauloschilling/sentry,fotinakis/sentry,gencer/sentry,felixbuenemann/sentry,alexm92/sentry,JamesMura/sentry,gencer/sentry,ifduyue/sentry,mvaled/sentry,gg7/sentry,llonchj/sentry,zenefits/sentry,mitsuhiko/sentry,BuildingLink/sentry,ifduyue/sentry,ngonzalvez/sentry,kevinlondon/sentry,TedaLIEz/sentry,mvaled/sentry,TedaLIEz/sentry,imankulov/sentry,ewdurbin/sentry,nicholasserra/sentry,drcapulet/sentry,vperron/sentry,JamesMura/sentry,wujuguang/sentry,zenefits/sentry,wujuguang/sentry,drcapulet/sentry,daevaorn/sentry,BuildingLink/sentry,JamesMura/sentry,jean/sentry,songyi199111/sentry,jean/sentry,wong2/sentry,beeftornado/sentry,korealerts1/sentry,vperron/sentry,argonemyth/sentry,looker/sentry,ewdurbin/sentry,1tush/sentry,Natim/sentry,JackDanger/sentry,daevaorn/sentry,wujuguang/sentry,JTCunning/sentry,BuildingLink/sentry,zenefits/sentry,looker/sentry,alexm92/sentry,ifduyue/sentry,daevaorn/sentry,drcapulet/sentry,BuildingLink/sentry,1tush/sentry,looker/sentry,imankulov/sentry,gencer/sentry,pauloschilling/sentry,looker/sentry,ifduyue/sentry,kevinastone/sentry,songyi199111/sentry,Kryz/sentry,zenefits/sentry,JTCunning/sentry,Natim/sentry,hongliang5623
/sentry,beeftornado/sentry,Kryz/sentry,beeftornado/sentry,boneyao/sentry,jokey2k/sentry,looker/sentry,jean/sentry,pauloschilling/sentry,camilonova/sentry,JTCunning/sentry,kevinlondon/sentry,mvaled/sentry,felixbuenemann/sentry,BuildingLink/sentry,wong2/sentry,mitsuhiko/sentry,wong2/sentry,fotinakis/sentry,jean/sentry,nicholasserra/sentry,argonemyth/sentry,korealerts1/sentry,hongliang5623/sentry | from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return object in Project.objects.get_for_user(user, access=access)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
Use == for permission check | from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return any(1
for o in Project.objects.get_for_user(user, access=access)
if object == o
)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
| <commit_before>from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return object in Project.objects.get_for_user(user, access=access)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
<commit_msg>Use == for permission check<commit_after> | from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return any(1
for o in Project.objects.get_for_user(user, access=access)
if object == o
)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
| from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return object in Project.objects.get_for_user(user, access=access)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
Use == for permission checkfrom sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return any(1
for o in Project.objects.get_for_user(user, access=access)
if object == o
)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
| <commit_before>from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return object in Project.objects.get_for_user(user, access=access)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
<commit_msg>Use == for permission check<commit_after>from sentry.constants import MEMBER_USER
from sentry.models import Team, Project, User
class PermissionError(Exception):
pass
def has_perm(object, user, access=MEMBER_USER):
if user.is_superuser:
return True
# TODO: abstract this into a permission registry
if type(object) == User:
return object == user
if type(object) == Team:
return object.slug in Team.objects.get_for_user(user, access=access)
if hasattr(object, 'project'):
object = object.project
if type(object) == Project:
return any(1
for o in Project.objects.get_for_user(user, access=access)
if object == o
)
raise TypeError(type(object))
def assert_perm(*args, **kwargs):
if not has_perm(*args, **kwargs):
raise PermissionError
|
8b4c8d30e70134a422576178534d41ebc9a92c88 | telethon/events/messagedeleted.py | telethon/events/messagedeleted.py | from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
| from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
| Fix events.MessageDeleted setting readonly properties | Fix events.MessageDeleted setting readonly properties
| Python | mit | LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,expectocode/Telethon | from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
Fix events.MessageDeleted setting readonly properties | from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
| <commit_before>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
<commit_msg>Fix events.MessageDeleted setting readonly properties<commit_after> | from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
| from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
Fix events.MessageDeleted setting readonly propertiesfrom .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
| <commit_before>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
<commit_msg>Fix events.MessageDeleted setting readonly properties<commit_after>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
@staticmethod
def build(update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return event
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
0d16478f62bfb7c761f70475933772c812f9bdde | app/tests/test_fixtures.py | app/tests/test_fixtures.py | """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
def test_simple_app(app):
"""Verify basic application."""
assert app
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
| """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Flask # type: ignore
def test_simple_app(app):
"""Verify basic application."""
assert isinstance(app, Flask)
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
| Improve app test and silence pylint | chore(fixture-tests): Improve app test and silence pylint
| Python | mpl-2.0 | defrank/roshi | """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
def test_simple_app(app):
"""Verify basic application."""
assert app
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
chore(fixture-tests): Improve app test and silence pylint | """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Flask # type: ignore
def test_simple_app(app):
"""Verify basic application."""
assert isinstance(app, Flask)
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
| <commit_before>"""
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
def test_simple_app(app):
"""Verify basic application."""
assert app
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
<commit_msg>chore(fixture-tests): Improve app test and silence pylint<commit_after> | """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Flask # type: ignore
def test_simple_app(app):
"""Verify basic application."""
assert isinstance(app, Flask)
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
| """
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
def test_simple_app(app):
"""Verify basic application."""
assert app
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
chore(fixture-tests): Improve app test and silence pylint"""
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Flask # type: ignore
def test_simple_app(app):
"""Verify basic application."""
assert isinstance(app, Flask)
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
| <commit_before>"""
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
def test_simple_app(app):
"""Verify basic application."""
assert app
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
<commit_msg>chore(fixture-tests): Improve app test and silence pylint<commit_after>"""
Test fixtures.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Flask # type: ignore
def test_simple_app(app):
"""Verify basic application."""
assert isinstance(app, Flask)
def test_simple_config(config):
"""Verify basic application configuration."""
assert isinstance(config, dict)
def test_webdriver_current_url(webdriver):
"""
Verify data URL.
Chrome: 'data:,'
Firefox: 'about:blank'
"""
assert webdriver.current_url in ['data:,', 'about:blank']
def test_webdriver_valid_service(webdriver, services=('chrome', 'firefox')):
"""Make sure valid service is being used."""
assert webdriver.name in services
def test_webdriver_get_google(webdriver):
"""If google is down, something bad has happened."""
webdriver.get('http://google.com/')
assert 'Google' in webdriver.title
def test_page_proxies_webdriver(page):
"""Verify webdriver proxying."""
assert page.title == page.driver.title
assert page.current_url == page.driver.current_url
assert page.get == page.driver.get
|
dc2900ce180dbcd2b0a0f48e358c38fff67629e0 | rwt/tests/test_scripts.py | rwt/tests/test_scripts.py | from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
| from __future__ import unicode_literals
import textwrap
import sys
import subprocess
from rwt import scripts
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
class TestDepsReader:
def test_reads_files_with_attribute_assignment(self):
script = textwrap.dedent('''
__requires__=['foo']
x.a = 'bar'
''')
assert scripts.DepsReader(script).read() == ['foo']
| Add test capturing error when attribute assignment occurs in the top of the script | Add test capturing error when attribute assignment occurs in the top of the script
| Python | mit | jaraco/rwt | from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
Add test capturing error when attribute assignment occurs in the top of the script | from __future__ import unicode_literals
import textwrap
import sys
import subprocess
from rwt import scripts
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
class TestDepsReader:
def test_reads_files_with_attribute_assignment(self):
script = textwrap.dedent('''
__requires__=['foo']
x.a = 'bar'
''')
assert scripts.DepsReader(script).read() == ['foo']
| <commit_before>from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
<commit_msg>Add test capturing error when attribute assignment occurs in the top of the script<commit_after> | from __future__ import unicode_literals
import textwrap
import sys
import subprocess
from rwt import scripts
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
class TestDepsReader:
def test_reads_files_with_attribute_assignment(self):
script = textwrap.dedent('''
__requires__=['foo']
x.a = 'bar'
''')
assert scripts.DepsReader(script).read() == ['foo']
| from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
Add test capturing error when attribute assignment occurs in the top of the scriptfrom __future__ import unicode_literals
import textwrap
import sys
import subprocess
from rwt import scripts
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
class TestDepsReader:
def test_reads_files_with_attribute_assignment(self):
script = textwrap.dedent('''
__requires__=['foo']
x.a = 'bar'
''')
assert scripts.DepsReader(script).read() == ['foo']
| <commit_before>from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
<commit_msg>Add test capturing error when attribute assignment occurs in the top of the script<commit_after>from __future__ import unicode_literals
import textwrap
import sys
import subprocess
from rwt import scripts
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
class TestDepsReader:
def test_reads_files_with_attribute_assignment(self):
script = textwrap.dedent('''
__requires__=['foo']
x.a = 'bar'
''')
assert scripts.DepsReader(script).read() == ['foo']
|
c9e19580c6488a5d46bc1a63e32c223802683179 | openprocurement/auth/provider.py | openprocurement/auth/provider.py | # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev'):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.secret_key = secret
db.create_all()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
| # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev',
hash_secret_key='',
auction_client_id='',
auction_client_secret=''):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.debug = True
oauth_provider.secret_key = secret
oauth_provider.hash_secret_key = hash_secret_key
db.create_all()
if not openprocurement.auth.models.Client.query.get(auction_client_id):
item = openprocurement.auth.models.Client(
client_id=auction_client_id,
client_secret=auction_client_secret,
_redirect_uris=' '.join([
'http://localhost:',
'http://sapient.office.quintagroup.com',
'http://auction-sandbox.openprocurement.org',
]),
_default_scopes='email',
)
db.session.add(item)
db.session.commit()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
| Create CLIENT for auction on start | Create CLIENT for auction on start
| Python | apache-2.0 | openprocurement/openprocurement.auth,Leits/openprocurement.auth,openprocurement/openprocurement.auth,Leits/openprocurement.auth | # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev'):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.secret_key = secret
db.create_all()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
Create CLIENT for auction on start | # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev',
hash_secret_key='',
auction_client_id='',
auction_client_secret=''):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.debug = True
oauth_provider.secret_key = secret
oauth_provider.hash_secret_key = hash_secret_key
db.create_all()
if not openprocurement.auth.models.Client.query.get(auction_client_id):
item = openprocurement.auth.models.Client(
client_id=auction_client_id,
client_secret=auction_client_secret,
_redirect_uris=' '.join([
'http://localhost:',
'http://sapient.office.quintagroup.com',
'http://auction-sandbox.openprocurement.org',
]),
_default_scopes='email',
)
db.session.add(item)
db.session.commit()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
| <commit_before># coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev'):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.secret_key = secret
db.create_all()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
<commit_msg>Create CLIENT for auction on start<commit_after> | # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev',
hash_secret_key='',
auction_client_id='',
auction_client_secret=''):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.debug = True
oauth_provider.secret_key = secret
oauth_provider.hash_secret_key = hash_secret_key
db.create_all()
if not openprocurement.auth.models.Client.query.get(auction_client_id):
item = openprocurement.auth.models.Client(
client_id=auction_client_id,
client_secret=auction_client_secret,
_redirect_uris=' '.join([
'http://localhost:',
'http://sapient.office.quintagroup.com',
'http://auction-sandbox.openprocurement.org',
]),
_default_scopes='email',
)
db.session.add(item)
db.session.commit()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
| # coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev'):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.secret_key = secret
db.create_all()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
Create CLIENT for auction on start# coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev',
hash_secret_key='',
auction_client_id='',
auction_client_secret=''):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.debug = True
oauth_provider.secret_key = secret
oauth_provider.hash_secret_key = hash_secret_key
db.create_all()
if not openprocurement.auth.models.Client.query.get(auction_client_id):
item = openprocurement.auth.models.Client(
client_id=auction_client_id,
client_secret=auction_client_secret,
_redirect_uris=' '.join([
'http://localhost:',
'http://sapient.office.quintagroup.com',
'http://auction-sandbox.openprocurement.org',
]),
_default_scopes='email',
)
db.session.add(item)
db.session.commit()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
| <commit_before># coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev'):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.secret_key = secret
db.create_all()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
<commit_msg>Create CLIENT for auction on start<commit_after># coding: utf-8
from openprocurement.auth.provider_app import oauth_provider, db
import openprocurement.auth.models
import openprocurement.auth.views
def make_oath_provider_app(
global_conf,
sqlite='sqlite:///db.sqlite',
secret='abcdfg',
timezone='Europe/Kiev',
hash_secret_key='',
auction_client_id='',
auction_client_secret=''):
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': sqlite,
})
oauth_provider.debug = True
oauth_provider.secret_key = secret
oauth_provider.hash_secret_key = hash_secret_key
db.create_all()
if not openprocurement.auth.models.Client.query.get(auction_client_id):
item = openprocurement.auth.models.Client(
client_id=auction_client_id,
client_secret=auction_client_secret,
_redirect_uris=' '.join([
'http://localhost:',
'http://sapient.office.quintagroup.com',
'http://auction-sandbox.openprocurement.org',
]),
_default_scopes='email',
)
db.session.add(item)
db.session.commit()
return oauth_provider
if __name__ == '__main__':
db.create_all()
oauth_provider.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite',
})
# oauth_provider.debug = True
# oauth_provider.use_reloader = False
oauth_provider.run()
|
8c92c76d11b297e0b68b5f1b1711f462064fb33e | survey/urls.py | survey/urls.py | from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
| from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
# url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
| Remove survey2 route to close the survey | Remove survey2 route to close the survey
Close the survey for the Manchester researchers.
| Python | agpl-3.0 | mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey | from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
Remove survey2 route to close the survey
Close the survey for the Manchester researchers. | from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
# url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
| <commit_before>from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
<commit_msg>Remove survey2 route to close the survey
Close the survey for the Manchester researchers.<commit_after> | from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
# url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
| from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
Remove survey2 route to close the survey
Close the survey for the Manchester researchers.from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
# url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
| <commit_before>from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
<commit_msg>Remove survey2 route to close the survey
Close the survey for the Manchester researchers.<commit_after>from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
# comment out line below to close survey
# url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
url(r'^export2$', 'survey.views.export2', name='export2'),
)
|
f7dd16abcab5d5e0134083267f21672de8e3d5e1 | hc/front/context_processors.py | hc/front/context_processors.py | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
| from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| Remove site_root from template context, it's never used | Remove site_root from template context, it's never used
| Python | bsd-3-clause | iphoting/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
Remove site_root from template context, it's never used | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| <commit_before>from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
<commit_msg>Remove site_root from template context, it's never used<commit_after> | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
Remove site_root from template context, it's never usedfrom django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| <commit_before>from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
<commit_msg>Remove site_root from template context, it's never used<commit_after>from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
|
dcdbd0e0a9959c760d7465c748f29acd1b2e353e | tests/integration/test_structs.py | tests/integration/test_structs.py | from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.iter_all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
| from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
| Fix usage of iter_all_repos in integration tests | Fix usage of iter_all_repos in integration tests
| Python | bsd-3-clause | wbrefvem/github3.py,icio/github3.py,jim-minter/github3.py,sigmavirus24/github3.py,h4ck3rm1k3/github3.py,agamdua/github3.py,degustaf/github3.py,krxsky/github3.py,balloob/github3.py,ueg1990/github3.py,itsmemattchung/github3.py,christophelec/github3.py | from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.iter_all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
Fix usage of iter_all_repos in integration tests | from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
| <commit_before>from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.iter_all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
<commit_msg>Fix usage of iter_all_repos in integration tests<commit_after> | from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
| from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.iter_all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
Fix usage of iter_all_repos in integration testsfrom .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
| <commit_before>from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.iter_all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
<commit_msg>Fix usage of iter_all_repos in integration tests<commit_after>from .helper import IntegrationHelper
class TestGitHubIterator(IntegrationHelper):
def test_resets_etag(self):
cassette_name = self.cassette_name('resets_etag')
with self.recorder.use_cassette(cassette_name):
users_iter = self.gh.all_users(number=10)
assert users_iter.etag is None
next(users_iter) # Make the request
assert users_iter.etag is not None
users_iter.refresh()
assert users_iter.etag is None
|
7f31d1ba671627f28bd57b49242b275f38fdff31 | server.py | server.py | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) | Increase request buffer as workaround for stackoverflow in asyncio | Increase request buffer as workaround for stackoverflow in asyncio
| Python | mit | azzuwan/PyApiServerExample | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True)Increase request buffer as workaround for stackoverflow in asyncio | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) | <commit_before>from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True)<commit_msg>Increase request buffer as workaround for stackoverflow in asyncio<commit_after> | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) | from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True)Increase request buffer as workaround for stackoverflow in asynciofrom japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) | <commit_before>from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits=4096
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True)<commit_msg>Increase request buffer as workaround for stackoverflow in asyncio<commit_after>from japronto import Application
from services.articles import ArticleService
from mongoengine import *
article_service = ArticleService()
def index(req):
"""
The main index
"""
return req.Response(text='You reached the index!')
def articles(req):
"""
Get alll articles
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
docs = article_service.all()
return req.Response(text=docs.to_json())
def keywords(req):
"""
Retrieve articles by keywords
"""
#AsyncIO buffer problem
req.transport.set_write_buffer_limits(high=4096)
words = req.match_dict['keywords']
docs = article_service.keywords(words)
headers = {'Content-Type': 'application/json'}
body = docs.to_json().encode()
return req.Response(body=body, headers=headers)
app = Application()
app.router.add_route('/', index)
app.router.add_route('/articles', articles)
app.router.add_route('/articles/keywords/{keywords}', keywords)
#Some bugs require us to dial to MongoDB just before server is listening
host = 'mongodb://aws-ap-southeast-1-portal.0.dblayer.com/news'
connect(db='news',host=host, port=15501, username='azzuwan', password='Reddoor74', alias='default', connect=False)
app.run(debug=True) |
3a3adca2e5462a98c70a8624f880e35e497e5acc | server.py | server.py | import http.server
PORT = 8000
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
| import os
import http.server
PORT = int(os.environ.get("PORT", 8000))
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
| Set port depending on environment | Set port depending on environment
| Python | mit | Charlotteis/guestbook | import http.server
PORT = 8000
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
Set port depending on environment | import os
import http.server
PORT = int(os.environ.get("PORT", 8000))
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
| <commit_before>import http.server
PORT = 8000
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
<commit_msg>Set port depending on environment<commit_after> | import os
import http.server
PORT = int(os.environ.get("PORT", 8000))
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
| import http.server
PORT = 8000
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
Set port depending on environmentimport os
import http.server
PORT = int(os.environ.get("PORT", 8000))
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
| <commit_before>import http.server
PORT = 8000
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
<commit_msg>Set port depending on environment<commit_after>import os
import http.server
PORT = int(os.environ.get("PORT", 8000))
HOST = "127.0.0.1"
# This will display the site at `http://localhost:8000/`
server_address = (HOST, PORT)
# The CGIHTTPRequestHandler class allows us to run the cgi script in /cgi-bin/
# Rather than attempt to display the cgi file itself, which a 'BaseHTTPRequestHandler' or
# 'SimpleHTTPRequestHandler' may do
httpd = http.server.HTTPServer(server_address, http.server.CGIHTTPRequestHandler)
print("Starting my web server on port {0}".format(PORT))
# Make sure the server is always serving the content
# You can stop the server running using CTRL + C
httpd.serve_forever()
|
7e33e3ed495adc871a49f7217705a7d5710e7ed8 | cfgov/v1/templatetags/share.py | cfgov/v1/templatetags/share.py | import os
from django import template
from wagtail.wagtailcore.models import Page
from django.conf import settings
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if settings.STAGING_HOSTNAME not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
| import os
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if os.environ.get('STAGING_HOSTNAME') not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
| Fix one more place where STAGING_HOSTNAME uses settings | Fix one more place where STAGING_HOSTNAME uses settings
| Python | cc0-1.0 | kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh | import os
from django import template
from wagtail.wagtailcore.models import Page
from django.conf import settings
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if settings.STAGING_HOSTNAME not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
Fix one more place where STAGING_HOSTNAME uses settings | import os
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if os.environ.get('STAGING_HOSTNAME') not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
| <commit_before>import os
from django import template
from wagtail.wagtailcore.models import Page
from django.conf import settings
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if settings.STAGING_HOSTNAME not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
<commit_msg>Fix one more place where STAGING_HOSTNAME uses settings<commit_after> | import os
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if os.environ.get('STAGING_HOSTNAME') not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
| import os
from django import template
from wagtail.wagtailcore.models import Page
from django.conf import settings
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if settings.STAGING_HOSTNAME not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
Fix one more place where STAGING_HOSTNAME uses settingsimport os
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if os.environ.get('STAGING_HOSTNAME') not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
| <commit_before>import os
from django import template
from wagtail.wagtailcore.models import Page
from django.conf import settings
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if settings.STAGING_HOSTNAME not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
<commit_msg>Fix one more place where STAGING_HOSTNAME uses settings<commit_after>import os
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.filter
def is_shared(page):
page = page.specific
if isinstance(page, Page):
if page.shared:
return True
else:
return False
@register.assignment_tag(takes_context=True)
def staging_url(context, page):
url = page.url
if os.environ.get('STAGING_HOSTNAME') not in page.url:
url = url.replace(context['request'].site.hostname,
os.environ.get('STAGING_HOSTNAME'))
return url
@register.assignment_tag(takes_context=True)
def v1page_permissions(context, page):
page = page.specific
return page.permissions_for_user(context['request'].user)
|
fc30efcbea90835314be50e65608102fa538e55c | sri21_vmx_pvs_to_file.py | sri21_vmx_pvs_to_file.py | #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
write_pvs_to_file(argv.filename, mode_pvs)
print argv.filename
| #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
# File appears to be already sorted, so no need for next line
# sorted(mode_pvs)
write_pvs_to_file(argv.filename, mode_pvs)
| Clear unnecessary code, add comments on sorting | Clear unnecessary code, add comments on sorting
| Python | apache-2.0 | razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects | #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
write_pvs_to_file(argv.filename, mode_pvs)
print argv.filename
Clear unnecessary code, add comments on sorting | #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
# File appears to be already sorted, so no need for next line
# sorted(mode_pvs)
write_pvs_to_file(argv.filename, mode_pvs)
| <commit_before>#!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
write_pvs_to_file(argv.filename, mode_pvs)
print argv.filename
<commit_msg>Clear unnecessary code, add comments on sorting<commit_after> | #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
# File appears to be already sorted, so no need for next line
# sorted(mode_pvs)
write_pvs_to_file(argv.filename, mode_pvs)
| #!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
write_pvs_to_file(argv.filename, mode_pvs)
print argv.filename
Clear unnecessary code, add comments on sorting#!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
# File appears to be already sorted, so no need for next line
# sorted(mode_pvs)
write_pvs_to_file(argv.filename, mode_pvs)
| <commit_before>#!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
write_pvs_to_file(argv.filename, mode_pvs)
print argv.filename
<commit_msg>Clear unnecessary code, add comments on sorting<commit_after>#!/dls_sw/prod/tools/RHEL6-x86_64/defaults/bin/dls-python
from utilities import get_pv_names, write_pvs_to_file
import argparse
parser = argparse.ArgumentParser('optional named arguments')
parser.add_argument("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE", default = 'test.txt')
requiredArgv = parser.add_argument_group('required arguments')
requiredArgv.add_argument("-m", "--mode", dest="mode",
help="Machine MODE to use", metavar="MODE", required = True)
argv = parser.parse_args()
mode_pvs = get_pv_names(argv.mode)
# File appears to be already sorted, so no need for next line
# sorted(mode_pvs)
write_pvs_to_file(argv.filename, mode_pvs)
|
23e1efbd24e317e6571d8436fc414dae9a3da767 | salt/output/__init__.py | salt/output/__init__.py | '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
# Legacy boolean option names: when one of these keys is truthy in the
# opts dict, get_printout() switches to the matching outputter
# ('text_out' maps to the 'txt' outputter).
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
    """Render *data* with the requested outputter and print it.

    Trailing whitespace is stripped from the formatted string before
    printing.
    """
    printer = get_printout(out, opts)
    print(printer(data).rstrip())
def get_printout(out, opts=None, **kwargs):
    '''
    Return a printer function for the requested outputter.

    out    -- outputter name ('yaml', 'json_out', ... or None for pprint)
    opts   -- options dict; legacy boolean flags named in STATIC override
              the *out* argument.  Extra keyword args are merged into it.
    Falls back to the 'pprint' outputter when the name is unknown.
    '''
    # Guard BEFORE any membership tests: the previous ordering raised
    # "TypeError: argument of type 'NoneType' is not iterable" whenever
    # opts was omitted (its default is None).
    if opts is None:
        opts = {}
    # Legacy --yaml-out style boolean flags take precedence over `out`.
    for outputter in STATIC:
        if outputter in opts:
            if opts[outputter]:
                if outputter == 'text_out':
                    out = 'txt'
                else:
                    out = outputter
    if out is None:
        out = 'pprint'
    # Accept both 'yaml' and 'yaml_out' spellings.
    if out.endswith('_out'):
        out = out[:-4]
    # NOTE: mutates the caller's opts dict by design -- the merged kwargs
    # must be visible to the loader below.
    opts.update(kwargs)
    if 'color' not in opts:
        opts['color'] = not bool(opts.get('no_color', False))
    outputters = salt.loader.outputters(opts)
    if out not in outputters:
        return outputters['pprint']
    return outputters[out]
| '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
    """Format *data* with the requested outputter and return the string.

    Behaves like display_output() but hands the rendered text (trailing
    whitespace stripped) back to the caller instead of printing it.
    """
    printer = get_printout(out, opts)
    rendered = printer(data)
    return rendered.rstrip()
| Add function to outputter that returns the raw string to print | Add function to outputter that returns the raw string to print
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
Add function to outputter that returns the raw string to print | '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
| <commit_before>'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
<commit_msg>Add function to outputter that returns the raw string to print<commit_after> | '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
| '''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
Add function to outputter that returns the raw string to print'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
| <commit_before>'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
<commit_msg>Add function to outputter that returns the raw string to print<commit_after>'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
|
2d7974ac4895af5e7d2f5a627656bb3edbfa65a9 | config/config.py | config/config.py | def playerIcons(poi):
# Overviewer marker filter: for a 'Player' POI, point the marker icon at
# the overviewer.org avatar service and return the hover text.  Non-player
# POIs fall through (return None) and produce no marker.
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def signFilter(poi):
    """Marker filter for sign POIs.

    Returns the four sign lines joined with newlines as the marker text;
    any non-sign POI returns None and is skipped.
    """
    if poi['id'] != 'Sign':
        return None
    lines = [poi['Text%d' % n] for n in (1, 2, 3, 4)]
    return "\n".join(lines)
# Minecraft-Overviewer configuration.  `worlds` and `renders` are never
# defined here -- presumably dicts injected into this file's namespace by
# Overviewer's config loader (TODO confirm).
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
# Marker sets shared by both renders; each filterFunction decides which
# POIs appear and supplies their hover text.
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
# Daytime render with smooth lighting.
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
# Night-time render of the same world.
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
| def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
# Only signs whose last line is exactly "-- RENDER --" are published to
# the map; everything else stays hidden so players can keep private signs
# and the render stays uncluttered.
def signFilter(poi):
    """Marker filter: render only signs that opt in via '-- RENDER --'."""
    if poi['id'] != 'Sign':
        return None
    if poi['Text4'] != '-- RENDER --':
        return None
    return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
| Add filter text to signs | Add filter text to signs
| Python | mit | mide/minecraft-overviewer,StefanBossbaly/minecraft-overviewer,StefanBossbaly/minecraft-overviewer,mide/minecraft-overviewer | def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def signFilter(poi):
if poi['id'] == 'Sign':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
Add filter text to signs | def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
# Only signs with "-- RENDER --" on the last line will be shown
# Otherwise, people can't have secret bases and the render is too busy anyways.
def signFilter(poi):
if poi['id'] == 'Sign':
if poi['Text4'] == '-- RENDER --':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
| <commit_before>def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def signFilter(poi):
if poi['id'] == 'Sign':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
<commit_msg>Add filter text to signs<commit_after> | def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
# Only signs with "-- RENDER --" on the last line will be shown
# Otherwise, people can't have secret bases and the render is too busy anyways.
def signFilter(poi):
if poi['id'] == 'Sign':
if poi['Text4'] == '-- RENDER --':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
| def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def signFilter(poi):
if poi['id'] == 'Sign':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
Add filter text to signsdef playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
# Only signs with "-- RENDER --" on the last line will be shown
# Otherwise, people can't have secret bases and the render is too busy anyways.
def signFilter(poi):
if poi['id'] == 'Sign':
if poi['Text4'] == '-- RENDER --':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
| <commit_before>def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def signFilter(poi):
if poi['id'] == 'Sign':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
<commit_msg>Add filter text to signs<commit_after>def playerIcons(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
# Only signs with "-- RENDER --" on the last line will be shown
# Otherwise, people can't have secret bases and the render is too busy anyways.
def signFilter(poi):
if poi['id'] == 'Sign':
if poi['Text4'] == '-- RENDER --':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
worlds['minecraft'] = "/home/minecraft/server/world"
outputdir = "/home/minecraft/render/"
markers = [
dict(name="Players", filterFunction=playerIcons),
dict(name="Signs", filterFunction=signFilter)
]
renders["day"] = {
'world': 'minecraft',
'title': 'Day',
'rendermode': 'smooth_lighting',
'markers': markers
}
renders["night"] = {
'world': 'minecraft',
'title': 'Night',
'rendermode': 'smooth_night',
'markers': markers
}
|
fb0eae3a9a760460f664adeef2ff71b2e8daac0f | twelve/env.py | twelve/env.py | import os
import extensions
# Aggregates service values discovered through entry-point plugins.
# `extensions.get` presumably yields pkg_resources-style plugin objects
# exposing .name and .load() -- confirm against the extensions module.
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
# adapter: optional adapter family name; per-service adapters are looked
#   up under "twelve.adapters" as "<adapter>.<service name>".
# environ: mapping of environment variables; falls back to os.environ.
# names: optional per-service name overrides, keyed by plugin name.
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
# Unknown attribute lookups fall through to the loaded service values;
# a missing service yields None rather than raising AttributeError.
return self.values.get(name)
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
# Instantiate every "twelve.services" plugin with (environ, name
# override), then optionally wrap the value with a matching adapter.
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
| import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def __repr__(self):
return "<twelve.Environment [{0}]>".format(",".join(self.values))
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
| Add a repr for twelve.Environment | Add a repr for twelve.Environment
| Python | bsd-3-clause | dstufft/twelve | import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
Add a repr for twelve.Environment | import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def __repr__(self):
return "<twelve.Environment [{0}]>".format(",".join(self.values))
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
| <commit_before>import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
<commit_msg>Add a repr for twelve.Environment<commit_after> | import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def __repr__(self):
return "<twelve.Environment [{0}]>".format(",".join(self.values))
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
| import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
Add a repr for twelve.Environmentimport os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def __repr__(self):
return "<twelve.Environment [{0}]>".format(",".join(self.values))
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
| <commit_before>import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
<commit_msg>Add a repr for twelve.Environment<commit_after>import os
import extensions
class Environment(object):
def __init__(self, adapter=None, environ=None, names=None, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
if names is None:
names = {}
self.adapter = adapter
self.environ = environ
self.names = names
self.values = {}
self._load_all()
def __getattr__(self, name):
return self.values.get(name)
def __repr__(self):
return "<twelve.Environment [{0}]>".format(",".join(self.values))
def _load_all(self):
# Load Services
self._load_services()
def _load_services(self):
for plugin in extensions.get(group="twelve.services"):
service = plugin.load()
value = service(
self.environ if self.environ is not None else os.environ,
self.names.get(plugin.name)
)
if self.adapter is not None:
adapters = list(extensions.get(group="twelve.adapters", name="{0}.{1}".format(self.adapter, plugin.name)))
if len(adapters):
adapter = adapters[0].load()
value = adapter(value)
self.values[plugin.name] = value
|
25bbe2cfc1b3b8f926176d83fbaa5c53bb85651a | tinysrt.py | tinysrt.py | #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
# Matches one SRT cue: index line, timing line, then the text block up to
# (but not including) the blank line terminating the cue.  The original
# raw pattern began with a backslash-newline -- a regex-escaped newline
# that demanded a leading newline, so the FIRST cue of a file never
# matched -- and without DOTALL the greedy (.+) truncated multi-line text.
SUBTITLE_PATTERN = r'''(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)

'''
# DOTALL lets the non-greedy (.+?) span multiple content lines.
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
# One parsed SRT cue: sequence number, start/end timedeltas, text.
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
    """Convert an SRT timestamp ('HH:MM:SS,mmm') to a datetime.timedelta."""
    fields = [int(part) for part in re.split('[,:]', time)]
    hrs, mins, secs, millis = fields
    return datetime.timedelta(
        hours=hrs, minutes=mins, seconds=secs, milliseconds=millis,
    )
def parse(srt):
    """Lazily yield a Subtitle tuple for each cue found in the *srt* string."""
    for found in SUBTITLE_REGEX.finditer(srt):
        idx, begin, finish, text = found.groups()
        yield Subtitle(
            index=int(idx),
            start=parse_time(begin),
            end=parse_time(finish),
            content=text,
        )
| #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_PATTERN = '''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)
'''
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
| Split out subtitle pattern from compilation phase | Split out subtitle pattern from compilation phase
| Python | mit | cdown/srt | #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
Split out subtitle pattern from compilation phase | #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_PATTERN = '''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)
'''
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
| <commit_before>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
<commit_msg>Split out subtitle pattern from compilation phase<commit_after> | #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_PATTERN = '''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)
'''
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
| #!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
Split out subtitle pattern from compilation phase#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_PATTERN = '''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)
'''
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
| <commit_before>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
<commit_msg>Split out subtitle pattern from compilation phase<commit_after>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_PATTERN = '''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+?)
'''
SUBTITLE_REGEX = re.compile(SUBTITLE_PATTERN, re.MULTILINE | re.DOTALL)
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
f479db0d829977766607d9131ddc85b1349c6f4a | userApp/tests.py | userApp/tests.py | from django.test import TestCase
# Create your tests here.
class BasicTestCase(TestCase):
"""Test getting various urls for user app"""
def test_getting_login(self):
self.client.get('/user/login')
def test_getting_register(self):
self.client.get('/user/register')
def test_getting_(self):
self.client.get('/alerts')
| from django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your tests here.
class TestUserFunction(TestCase):
"""Test getting various urls for user app"""
def setUp(self):
self.test_user = create_user()
def test_getting_login(self):
self.assertEqual(self.client.get('/user/login/').status_code, 200)
def test_getting_register(self):
self.assertEqual(self.client.get('/user/register/').status_code, 200)
def test_login(self):
self.assertFalse(self.client.login(username="test_user", password="wrong_password"))
self.assertFalse(self.client.login(username="nonexistant_user", password="password"))
self.assertTrue(self.client.login(username="test_user", password="password"))
def create_user():
return User.objects.create_user("test_user", email="user@bikemaps.org", password="password")
def create_superuser():
return User.objects.create_superuser("test_superuser", email="super_user@bikemaps.org", password="password")
| Add more thorough testing to user app | Add more thorough testing to user app
| Python | mit | SPARLab/BikeMaps,SPARLab/BikeMaps,SPARLab/BikeMaps | from django.test import TestCase
# Create your tests here.
class BasicTestCase(TestCase):
"""Test getting various urls for user app"""
def test_getting_login(self):
self.client.get('/user/login')
def test_getting_register(self):
self.client.get('/user/register')
def test_getting_(self):
self.client.get('/alerts')
Add more thorough testing to user app | from django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your tests here.
class TestUserFunction(TestCase):
"""Test getting various urls for user app"""
def setUp(self):
self.test_user = create_user()
def test_getting_login(self):
self.assertEqual(self.client.get('/user/login/').status_code, 200)
def test_getting_register(self):
self.assertEqual(self.client.get('/user/register/').status_code, 200)
def test_login(self):
self.assertFalse(self.client.login(username="test_user", password="wrong_password"))
self.assertFalse(self.client.login(username="nonexistant_user", password="password"))
self.assertTrue(self.client.login(username="test_user", password="password"))
def create_user():
return User.objects.create_user("test_user", email="user@bikemaps.org", password="password")
def create_superuser():
return User.objects.create_superuser("test_superuser", email="super_user@bikemaps.org", password="password")
| <commit_before>from django.test import TestCase
# Create your tests here.
class BasicTestCase(TestCase):
"""Test getting various urls for user app"""
def test_getting_login(self):
self.client.get('/user/login')
def test_getting_register(self):
self.client.get('/user/register')
def test_getting_(self):
self.client.get('/alerts')
<commit_msg>Add more thorough testing to user app<commit_after> | from django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your tests here.
class TestUserFunction(TestCase):
"""Test getting various urls for user app"""
def setUp(self):
self.test_user = create_user()
def test_getting_login(self):
self.assertEqual(self.client.get('/user/login/').status_code, 200)
def test_getting_register(self):
self.assertEqual(self.client.get('/user/register/').status_code, 200)
def test_login(self):
self.assertFalse(self.client.login(username="test_user", password="wrong_password"))
self.assertFalse(self.client.login(username="nonexistant_user", password="password"))
self.assertTrue(self.client.login(username="test_user", password="password"))
def create_user():
return User.objects.create_user("test_user", email="user@bikemaps.org", password="password")
def create_superuser():
return User.objects.create_superuser("test_superuser", email="super_user@bikemaps.org", password="password")
| from django.test import TestCase
# Create your tests here.
class BasicTestCase(TestCase):
"""Test getting various urls for user app"""
def test_getting_login(self):
self.client.get('/user/login')
def test_getting_register(self):
self.client.get('/user/register')
def test_getting_(self):
self.client.get('/alerts')
Add more thorough testing to user appfrom django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your tests here.
class TestUserFunction(TestCase):
"""Test getting various urls for user app"""
def setUp(self):
self.test_user = create_user()
def test_getting_login(self):
self.assertEqual(self.client.get('/user/login/').status_code, 200)
def test_getting_register(self):
self.assertEqual(self.client.get('/user/register/').status_code, 200)
def test_login(self):
self.assertFalse(self.client.login(username="test_user", password="wrong_password"))
self.assertFalse(self.client.login(username="nonexistant_user", password="password"))
self.assertTrue(self.client.login(username="test_user", password="password"))
def create_user():
return User.objects.create_user("test_user", email="user@bikemaps.org", password="password")
def create_superuser():
return User.objects.create_superuser("test_superuser", email="super_user@bikemaps.org", password="password")
| <commit_before>from django.test import TestCase
# Create your tests here.
class BasicTestCase(TestCase):
"""Test getting various urls for user app"""
def test_getting_login(self):
self.client.get('/user/login')
def test_getting_register(self):
self.client.get('/user/register')
def test_getting_(self):
self.client.get('/alerts')
<commit_msg>Add more thorough testing to user app<commit_after>from django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your tests here.
class TestUserFunction(TestCase):
"""Test getting various urls for user app"""
def setUp(self):
self.test_user = create_user()
def test_getting_login(self):
self.assertEqual(self.client.get('/user/login/').status_code, 200)
def test_getting_register(self):
self.assertEqual(self.client.get('/user/register/').status_code, 200)
def test_login(self):
self.assertFalse(self.client.login(username="test_user", password="wrong_password"))
self.assertFalse(self.client.login(username="nonexistant_user", password="password"))
self.assertTrue(self.client.login(username="test_user", password="password"))
def create_user():
return User.objects.create_user("test_user", email="user@bikemaps.org", password="password")
def create_superuser():
return User.objects.create_superuser("test_superuser", email="super_user@bikemaps.org", password="password")
|
28f9f7e85bb8353435db322138d1bd624934110f | london_commute_alert.py | london_commute_alert.py | import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
| import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
# While tube is on shuttle service, don't email
return
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
| Halt emails for time being | Halt emails for time being
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit | import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
Halt emails for time being | import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
# While tube is on shuttle service, don't email
return
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
| <commit_before>import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
<commit_msg>Halt emails for time being<commit_after> | import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
# While tube is on shuttle service, don't email
return
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
| import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
Halt emails for time beingimport datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
# While tube is on shuttle service, don't email
return
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
| <commit_before>import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
<commit_msg>Halt emails for time being<commit_after>import datetime
import os
import requests
import sys
def update(lines):
url = 'http://api.tfl.gov.uk/Line/Mode/tube/Status'
resp = requests.get(url).json()
result = []
for el in resp:
value = el['lineStatuses'][0]
state = value['statusSeverityDescription']
if el['id'] in lines and state != 'Good Service':
result.append('{}: {} ({})'.format(
el['id'].capitalize(), state, value['reason']))
return result
def email(delays):
# While tube is on shuttle service, don't email
return
os.chdir(sys.path[0])
with open('curl_raw_command.sh') as f:
raw_command = f.read()
# Running on PythonAnywhere - Monday to Sunday. Skip on the weekend
if delays and datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject='Tube delays for commute',
body='\n\n'.join(delays)))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
email(update(commute_lines))
if __name__ == '__main__':
main()
|
3efd847f8569a30b018925b39d1552a4aead6e8f | destroyer/destroyer.py | destroyer/destroyer.py | """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
| """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
| Update main module with facebook integration | Update main module with facebook integration
| Python | mit | jaredmichaelsmith/destroyer | """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
Update main module with facebook integration | """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
| <commit_before>"""destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
<commit_msg>Update main module with facebook integration<commit_after> | """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
| """destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
Update main module with facebook integration"""destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
| <commit_before>"""destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
<commit_msg>Update main module with facebook integration<commit_after>"""destroyer.py - Main module file for the application. Includes the code for
the command line interface."""
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
|
f9cb83b2279e00c8812895e1cc6b46438615f8ac | wafer/tests/test_menu.py | wafer/tests/test_menu.py | # -*- coding: utf-8 -*-
"""Tests for wafer menu utilities."""
from django.test import TestCase
from wafer.menu import Menu
class MenuTests(TestCase):
def test_mk_item_defaults(self):
self.assertEqual(Menu.mk_item(
u"My Label", u"http://example.com"
), {
"label": u"My Label", "url": u"http://example.com",
"sort_key": None, "image": None,
})
def test_mk_menu_defaults(self):
self.assertEqual(Menu.mk_menu("my-menu", u"My Menu", []), {
"menu": "my-menu", "label": u"My Menu",
"items": [], "sort_key": None
})
| Add tests for mk_item and mk_menu. | Add tests for mk_item and mk_menu.
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | Add tests for mk_item and mk_menu. | # -*- coding: utf-8 -*-
"""Tests for wafer menu utilities."""
from django.test import TestCase
from wafer.menu import Menu
class MenuTests(TestCase):
def test_mk_item_defaults(self):
self.assertEqual(Menu.mk_item(
u"My Label", u"http://example.com"
), {
"label": u"My Label", "url": u"http://example.com",
"sort_key": None, "image": None,
})
def test_mk_menu_defaults(self):
self.assertEqual(Menu.mk_menu("my-menu", u"My Menu", []), {
"menu": "my-menu", "label": u"My Menu",
"items": [], "sort_key": None
})
| <commit_before><commit_msg>Add tests for mk_item and mk_menu.<commit_after> | # -*- coding: utf-8 -*-
"""Tests for wafer menu utilities."""
from django.test import TestCase
from wafer.menu import Menu
class MenuTests(TestCase):
def test_mk_item_defaults(self):
self.assertEqual(Menu.mk_item(
u"My Label", u"http://example.com"
), {
"label": u"My Label", "url": u"http://example.com",
"sort_key": None, "image": None,
})
def test_mk_menu_defaults(self):
self.assertEqual(Menu.mk_menu("my-menu", u"My Menu", []), {
"menu": "my-menu", "label": u"My Menu",
"items": [], "sort_key": None
})
| Add tests for mk_item and mk_menu.# -*- coding: utf-8 -*-
"""Tests for wafer menu utilities."""
from django.test import TestCase
from wafer.menu import Menu
class MenuTests(TestCase):
def test_mk_item_defaults(self):
self.assertEqual(Menu.mk_item(
u"My Label", u"http://example.com"
), {
"label": u"My Label", "url": u"http://example.com",
"sort_key": None, "image": None,
})
def test_mk_menu_defaults(self):
self.assertEqual(Menu.mk_menu("my-menu", u"My Menu", []), {
"menu": "my-menu", "label": u"My Menu",
"items": [], "sort_key": None
})
| <commit_before><commit_msg>Add tests for mk_item and mk_menu.<commit_after># -*- coding: utf-8 -*-
"""Tests for wafer menu utilities."""
from django.test import TestCase
from wafer.menu import Menu
class MenuTests(TestCase):
def test_mk_item_defaults(self):
self.assertEqual(Menu.mk_item(
u"My Label", u"http://example.com"
), {
"label": u"My Label", "url": u"http://example.com",
"sort_key": None, "image": None,
})
def test_mk_menu_defaults(self):
self.assertEqual(Menu.mk_menu("my-menu", u"My Menu", []), {
"menu": "my-menu", "label": u"My Menu",
"items": [], "sort_key": None
})
| |
02da53951e48fd6b164d883cdf5c63c7b7f08049 | rmake_plugins/multinode_client/nodetypes.py | rmake_plugins/multinode_client/nodetypes.py | import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
| import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
| Use metaclasses to register node types. | Use metaclasses to register node types.
| Python | apache-2.0 | fedora-conary/rmake-2,fedora-conary/rmake-2,fedora-conary/rmake-2,fedora-conary/rmake-2 | import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
Use metaclasses to register node types. | import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
| <commit_before>import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
<commit_msg>Use metaclasses to register node types.<commit_after> | import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
| import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
Use metaclasses to register node types.import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
| <commit_before>import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
<commit_msg>Use metaclasses to register node types.<commit_after>import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
|
c0c67c14cb9c91c8cd07bfe6d013639121d1c5f7 | crm/tests/test_contact_user.py | crm/tests/test_contact_user.py | from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from login.tests.model_maker import make_user
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
contact = make_contact(
'pkimber',
'Patrick Kimber',
)
make_user_contact(make_user('fred'), contact)
user = User.objects.get(username='fred')
user_contacts = user.usercontact_set.all()
self.assertIn('Kimber', user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
fred = make_user('fred')
jsmith = make_contact('jsmith', 'John Smith')
pkimber = make_contact('pkimber', 'Patrick Kimber')
make_user_contact(fred, pkimber)
self.assertRaises(
IntegrityError,
make_user_contact,
fred,
jsmith,
)
| from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from crm.tests.scenario import (
contact_contractor,
)
from login.tests.scenario import (
get_fred,
get_sara,
user_contractor,
)
class TestContactUser(TestCase):
    """Exercise the user <-> contact link using the standard scenarios."""

    def test_link_user_to_contact(self):
        """Create a contact and link it to a user"""
        user_contractor()
        contact_contractor()
        linked = get_fred().usercontact_set.all()
        self.assertIn("Fred's Farm", linked[0].contact.name)

    def test_one_contact_per_user(self):
        """Make sure a user can only link to one contact"""
        user_contractor()
        contact_contractor()
        # Fred is already linked by the scenario; a second link must violate
        # the one-contact-per-user constraint.
        with self.assertRaises(IntegrityError):
            make_user_contact(get_sara(), make_contact('zoo', 'Bristol Zoo'))
| Update test to use standard scenario | Update test to use standard scenario
| Python | apache-2.0 | pkimber/crm,pkimber/crm,pkimber/crm | from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from login.tests.model_maker import make_user
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
contact = make_contact(
'pkimber',
'Patrick Kimber',
)
make_user_contact(make_user('fred'), contact)
user = User.objects.get(username='fred')
user_contacts = user.usercontact_set.all()
self.assertIn('Kimber', user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
fred = make_user('fred')
jsmith = make_contact('jsmith', 'John Smith')
pkimber = make_contact('pkimber', 'Patrick Kimber')
make_user_contact(fred, pkimber)
self.assertRaises(
IntegrityError,
make_user_contact,
fred,
jsmith,
)
Update test to use standard scenario | from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from crm.tests.scenario import (
contact_contractor,
)
from login.tests.scenario import (
get_fred,
get_sara,
user_contractor,
)
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
user_contractor()
contact_contractor()
user_contacts = get_fred().usercontact_set.all()
self.assertIn("Fred's Farm", user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
user_contractor()
contact_contractor()
self.assertRaises(
IntegrityError,
make_user_contact,
get_sara(),
make_contact('zoo', 'Bristol Zoo')
)
| <commit_before>from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from login.tests.model_maker import make_user
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
contact = make_contact(
'pkimber',
'Patrick Kimber',
)
make_user_contact(make_user('fred'), contact)
user = User.objects.get(username='fred')
user_contacts = user.usercontact_set.all()
self.assertIn('Kimber', user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
fred = make_user('fred')
jsmith = make_contact('jsmith', 'John Smith')
pkimber = make_contact('pkimber', 'Patrick Kimber')
make_user_contact(fred, pkimber)
self.assertRaises(
IntegrityError,
make_user_contact,
fred,
jsmith,
)
<commit_msg>Update test to use standard scenario<commit_after> | from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from crm.tests.scenario import (
contact_contractor,
)
from login.tests.scenario import (
get_fred,
get_sara,
user_contractor,
)
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
user_contractor()
contact_contractor()
user_contacts = get_fred().usercontact_set.all()
self.assertIn("Fred's Farm", user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
user_contractor()
contact_contractor()
self.assertRaises(
IntegrityError,
make_user_contact,
get_sara(),
make_contact('zoo', 'Bristol Zoo')
)
| from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from login.tests.model_maker import make_user
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
contact = make_contact(
'pkimber',
'Patrick Kimber',
)
make_user_contact(make_user('fred'), contact)
user = User.objects.get(username='fred')
user_contacts = user.usercontact_set.all()
self.assertIn('Kimber', user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
fred = make_user('fred')
jsmith = make_contact('jsmith', 'John Smith')
pkimber = make_contact('pkimber', 'Patrick Kimber')
make_user_contact(fred, pkimber)
self.assertRaises(
IntegrityError,
make_user_contact,
fred,
jsmith,
)
Update test to use standard scenariofrom django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from crm.tests.scenario import (
contact_contractor,
)
from login.tests.scenario import (
get_fred,
get_sara,
user_contractor,
)
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
user_contractor()
contact_contractor()
user_contacts = get_fred().usercontact_set.all()
self.assertIn("Fred's Farm", user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
user_contractor()
contact_contractor()
self.assertRaises(
IntegrityError,
make_user_contact,
get_sara(),
make_contact('zoo', 'Bristol Zoo')
)
| <commit_before>from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from login.tests.model_maker import make_user
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
contact = make_contact(
'pkimber',
'Patrick Kimber',
)
make_user_contact(make_user('fred'), contact)
user = User.objects.get(username='fred')
user_contacts = user.usercontact_set.all()
self.assertIn('Kimber', user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
fred = make_user('fred')
jsmith = make_contact('jsmith', 'John Smith')
pkimber = make_contact('pkimber', 'Patrick Kimber')
make_user_contact(fred, pkimber)
self.assertRaises(
IntegrityError,
make_user_contact,
fred,
jsmith,
)
<commit_msg>Update test to use standard scenario<commit_after>from django.db import IntegrityError
from django.test import TestCase
from crm.tests.model_maker import (
make_contact,
make_user_contact,
)
from crm.tests.scenario import (
contact_contractor,
)
from login.tests.scenario import (
get_fred,
get_sara,
user_contractor,
)
class TestContactUser(TestCase):
def test_link_user_to_contact(self):
"""Create a contact and link it to a user"""
user_contractor()
contact_contractor()
user_contacts = get_fred().usercontact_set.all()
self.assertIn("Fred's Farm", user_contacts[0].contact.name)
def test_one_contact_per_user(self):
"""Make sure a user can only link to one contact"""
user_contractor()
contact_contractor()
self.assertRaises(
IntegrityError,
make_user_contact,
get_sara(),
make_contact('zoo', 'Bristol Zoo')
)
|
a20c88da5eb0b763072cc7bcba138983fe63ae31 | django_fsm_log/apps.py | django_fsm_log/apps.py | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the FSM log storage backend to django-fsm signals."""

    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    # Pin the implicit primary-key type so Django >= 3.2 does not emit the
    # models.W042 auto-created-primary-key warning for this app.
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the storage backend from settings so projects can swap it.
        backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        StateLog = self.get_model('StateLog')
        backend.setup_model(StateLog)
        # Record state both before and after every django-fsm transition.
        pre_transition.connect(backend.pre_transition_callback)
        post_transition.connect(backend.post_transition_callback)
| from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """Hooks the configured log storage backend into django-fsm's signals."""

    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Backend is configurable; resolve it from settings at startup.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log_model = self.get_model('StateLog')
        storage.setup_model(state_log_model)
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
| Solve warning coming from django 4.0 | Solve warning coming from django 4.0
| Python | mit | ticosax/django-fsm-log,gizmag/django-fsm-log | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
Solve warning coming from django 4.0 | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| <commit_before>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
<commit_msg>Solve warning coming from django 4.0<commit_after> | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
Solve warning coming from django 4.0from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| <commit_before>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
<commit_msg>Solve warning coming from django 4.0<commit_after>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
|
28126555aea9a78467dfcadbb2b14f9c640cdc6d | dwitter/templatetags/to_gravatar_url.py | dwitter/templatetags/to_gravatar_url.py | import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
    """Return the Gravatar image URL for *email* (retro-style fallback).

    Handles None/empty input and normalizes the address (strip + lowercase)
    before hashing, as the Gravatar spec requires.
    """
    normalized = (email or '').strip().lower()
    # hashlib.md5 requires bytes on Python 3 -- hashing the str raises
    # TypeError; encode explicitly.
    digest = hashlib.md5(normalized.encode('utf-8')).hexdigest()
    return 'https://gravatar.com/avatar/%s?d=retro' % digest
| import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
    """Gravatar avatar URL for *email*, falling back to the 'retro' image."""
    address = (email or '').strip().lower()
    digest = hashlib.md5(address.encode('utf-8')).hexdigest()
    return 'https://gravatar.com/avatar/%s?d=retro' % digest
| Fix gravatar hashing error on py3 | Fix gravatar hashing error on py3
| Python | apache-2.0 | lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter | import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower()).hexdigest())
Fix gravatar hashing error on py3 | import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower().encode('utf-8')).hexdigest())
| <commit_before>import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower()).hexdigest())
<commit_msg>Fix gravatar hashing error on py3<commit_after> | import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower().encode('utf-8')).hexdigest())
| import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower()).hexdigest())
Fix gravatar hashing error on py3import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower().encode('utf-8')).hexdigest())
| <commit_before>import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower()).hexdigest())
<commit_msg>Fix gravatar hashing error on py3<commit_after>import hashlib
from django import template
register = template.Library()
@register.filter
def to_gravatar_url(email):
return ('https://gravatar.com/avatar/%s?d=retro' %
hashlib.md5((email or '').strip().lower().encode('utf-8')).hexdigest())
|
5a4fc9a89bfdb279ad0cda40f45b35ff3841c970 | voteswap/urls.py | voteswap/urls.py | """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout

from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
# URL routes for the voteswap project.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url('', include('social.apps.django_app.urls', namespace='social')),
    url('^home/$', index, name='index'),
    url('^$', landing_page, name='landing_page'),
    # Pass the view callable: string dotted-path view arguments to url()
    # were deprecated in Django 1.8 and removed in Django 1.10.
    url('^logout/$', logout, name='logout'),
    url('^user/', include('users.urls', namespace='users')),
    url('^signup/$', signup, name='signup'),
]
| """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', logout, name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
| Fix logout view so django stops complaining | Fix logout view so django stops complaining
| Python | mit | sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap | """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
Fix logout view so django stops complaining | """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', logout, name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
| <commit_before>"""voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
<commit_msg>Fix logout view so django stops complaining<commit_after> | """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', logout, name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
| """voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
Fix logout view so django stops complaining"""voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', logout, name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
| <commit_before>"""voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
<commit_msg>Fix logout view so django stops complaining<commit_after>"""voteswap URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.views import logout
from voteswap.views import index
from voteswap.views import landing_page
from voteswap.views import signup
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('', include('social.apps.django_app.urls', namespace='social')),
url('^home/$', index, name='index'),
url('^$', landing_page, name='landing_page'),
url('^logout/$', logout, name='logout'),
url('^user/', include('users.urls', namespace='users')),
url('^signup/$', signup, name='signup'),
]
|
bf269c03f93cf26630e67b3e44384eaf1235f808 | tests/test_20_message.py | tests/test_20_message.py |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
# check File init
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
# check file_handle reset
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
# run after reset
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
| Check more possibilitie for File. | Check more possibilitie for File.
| Python | apache-2.0 | ecmwf/cfgrib |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
Check more possibilitie for File. |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
# check File init
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
# check file_handle reset
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
# run after reset
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
| <commit_before>
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
<commit_msg>Check more possibilitie for File.<commit_after> |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
# check File init
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
# check file_handle reset
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
# run after reset
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
Check more possibilitie for File.
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
# check File init
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
# check file_handle reset
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
# run after reset
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
| <commit_before>
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
<commit_msg>Check more possibilitie for File.<commit_after>
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
import pytest
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_Message():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.Message(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
def test_File():
# check File init
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
# check file_handle reset
with pytest.raises(RuntimeError):
next(message.File(TEST_DATA))
# run after reset
with message.File(TEST_DATA) as res:
assert sum(1 for _ in res) == 72
|
3e86072667d486cb75e0cefca847bbdd2f032023 | charat2/views/guides.py | charat2/views/guides.py | import requests
def user_guide():
r = requests.get("http://drweeaboo.net/msparp/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
| import requests
def user_guide():
r = requests.get("https://karry.terminallycapricio.us/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
| Update the user guide URL. | Update the user guide URL.
| Python | agpl-3.0 | MSPARP/newparp,MSPARP/newparp,MSPARP/newparp | import requests
def user_guide():
r = requests.get("http://drweeaboo.net/msparp/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
Update the user guide URL. | import requests
def user_guide():
r = requests.get("https://karry.terminallycapricio.us/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
| <commit_before>import requests
def user_guide():
r = requests.get("http://drweeaboo.net/msparp/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
<commit_msg>Update the user guide URL.<commit_after> | import requests
def user_guide():
r = requests.get("https://karry.terminallycapricio.us/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
| import requests
def user_guide():
r = requests.get("http://drweeaboo.net/msparp/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
Update the user guide URL.import requests
def user_guide():
r = requests.get("https://karry.terminallycapricio.us/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
| <commit_before>import requests
def user_guide():
r = requests.get("http://drweeaboo.net/msparp/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
<commit_msg>Update the user guide URL.<commit_after>import requests
def user_guide():
r = requests.get("https://karry.terminallycapricio.us/userguide/duplicateguide.html")
r.encoding = r.apparent_encoding
return r.text, r.status_code
|
4b183fb87952404e5a71ffd5c52ea1bba5bfc2b9 | csv2ofx/mappings/stripe.py | csv2ofx/mappings/stripe.py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download All columns. (as well as custom).
The Default set does not include card information, so provides no appropriate value for the
PAYEE field for an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions identify a customer.
This mapping sets PAYEE to Customer Name if it exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description') if len(tr.get('customer_description')) > 0 else tr.get('card_name', ""),
'desc': itemgetter("description")
}
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download
All columns. (as well as custom). The Default set does not include card
information, so provides no appropriate value for the PAYEE field for
an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions
identify a customer. This mapping sets PAYEE to Customer Name if it
exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description')
if len(tr.get('customer_description')) > 0
else tr.get('card_name', ""),
'desc': itemgetter("description")
}
| Fix lint line length warnings (blocking manage checks) | Fix lint line length warnings (blocking manage checks)
| Python | mit | reubano/csv2ofx,reubano/csv2ofx | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download All columns. (as well as custom).
The Default set does not include card information, so provides no appropriate value for the
PAYEE field for an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions identify a customer.
This mapping sets PAYEE to Customer Name if it exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description') if len(tr.get('customer_description')) > 0 else tr.get('card_name', ""),
'desc': itemgetter("description")
}
Fix lint line length warnings (blocking manage checks) | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download
All columns. (as well as custom). The Default set does not include card
information, so provides no appropriate value for the PAYEE field for
an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions
identify a customer. This mapping sets PAYEE to Customer Name if it
exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description')
if len(tr.get('customer_description')) > 0
else tr.get('card_name', ""),
'desc': itemgetter("description")
}
| <commit_before># -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download All columns. (as well as custom).
The Default set does not include card information, so provides no appropriate value for the
PAYEE field for an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions identify a customer.
This mapping sets PAYEE to Customer Name if it exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description') if len(tr.get('customer_description')) > 0 else tr.get('card_name', ""),
'desc': itemgetter("description")
}
<commit_msg>Fix lint line length warnings (blocking manage checks)<commit_after> | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download
All columns. (as well as custom). The Default set does not include card
information, so provides no appropriate value for the PAYEE field for
an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions
identify a customer. This mapping sets PAYEE to Customer Name if it
exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description')
if len(tr.get('customer_description')) > 0
else tr.get('card_name', ""),
'desc': itemgetter("description")
}
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download All columns. (as well as custom).
The Default set does not include card information, so provides no appropriate value for the
PAYEE field for an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions identify a customer.
This mapping sets PAYEE to Customer Name if it exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description') if len(tr.get('customer_description')) > 0 else tr.get('card_name', ""),
'desc': itemgetter("description")
}
Fix lint line length warnings (blocking manage checks)# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download
All columns. (as well as custom). The Default set does not include card
information, so provides no appropriate value for the PAYEE field for
an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions
identify a customer. This mapping sets PAYEE to Customer Name if it
exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description')
if len(tr.get('customer_description')) > 0
else tr.get('card_name', ""),
'desc': itemgetter("description")
}
| <commit_before># -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download All columns. (as well as custom).
The Default set does not include card information, so provides no appropriate value for the
PAYEE field for an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions identify a customer.
This mapping sets PAYEE to Customer Name if it exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description') if len(tr.get('customer_description')) > 0 else tr.get('card_name', ""),
'desc': itemgetter("description")
}
<commit_msg>Fix lint line length warnings (blocking manage checks)<commit_after># -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
# pylint: disable=invalid-name
"""
csv2ofx.mappings.stripe
~~~~~~~~~~~~~~~~~~~~~~~~
Provides a mapping for transactions obtained via Stripe card processing
Note that Stripe provides a Default set of columns or you can download
All columns. (as well as custom). The Default set does not include card
information, so provides no appropriate value for the PAYEE field for
an anonymous transaction (missing a customer).
It's suggested the All Columns format be used if not all transactions
identify a customer. This mapping sets PAYEE to Customer Name if it
exists, otherwise Card Name (if provided)
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
mapping = {
'has_header': True,
'account': 'Stripe',
'id': itemgetter('id'),
'date': itemgetter('created'),
'amount': itemgetter('amount'),
'currency': itemgetter('currency'),
'payee': lambda tr: tr.get('customer_description')
if len(tr.get('customer_description')) > 0
else tr.get('card_name', ""),
'desc': itemgetter("description")
}
|
041a3bbd512d1800067bc12f522238d681c35ac4 | sheared/web/__init__.py | sheared/web/__init__.py | # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application']
| # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application', 'proxy']
| Add proxy module to __all__. | Add proxy module to __all__.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@248 5646265b-94b7-0310-9681-9501d24b2df7
| Python | mit | kirkeby/sheared | # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application']
Add proxy module to __all__.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@248 5646265b-94b7-0310-9681-9501d24b2df7 | # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application', 'proxy']
| <commit_before># vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application']
<commit_msg>Add proxy module to __all__.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@248 5646265b-94b7-0310-9681-9501d24b2df7<commit_after> | # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application', 'proxy']
| # vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application']
Add proxy module to __all__.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@248 5646265b-94b7-0310-9681-9501d24b2df7# vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application', 'proxy']
| <commit_before># vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application']
<commit_msg>Add proxy module to __all__.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@248 5646265b-94b7-0310-9681-9501d24b2df7<commit_after># vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__all__ = ['server', 'subserver', 'querystring', 'virtualhost',
'collection', 'error', 'entwiner', 'xmlrpc', 'resource',
'application', 'proxy']
|
3a57dfd7138be531fa265bea282eb7c62a391ac2 | bin/debug/load_timeline_for_day_and_user.py | bin/debug/load_timeline_for_day_and_user.py | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| Fix the --verbose argument to properly take an int | Fix the --verbose argument to properly take an int
Without this, the `i % args.verbose` check would fail since `args.verbose` was
a string
| Python | bsd-3-clause | e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
Fix the --verbose argument to properly take an int
Without this, the `i % args.verbose` check would fail since `args.verbose` was
a string | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| <commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
<commit_msg>Fix the --verbose argument to properly take an int
Without this, the `i % args.verbose` check would fail since `args.verbose` was
a string<commit_after> | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
Fix the --verbose argument to properly take an int
Without this, the `i % args.verbose` check would fail since `args.verbose` was
a stringimport json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| <commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
<commit_msg>Fix the --verbose argument to properly take an int
Without this, the `i % args.verbose` check would fail since `args.verbose` was
a string<commit_after>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
|
2828f056d0daabe613a5fe00584ab1bf699989c3 | bliski_publikator/monitorings/forms.py | bliski_publikator/monitorings/forms.py | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
| # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'logo', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
| Add monitoring logo in form | Add monitoring logo in form
| Python | mit | watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
Add monitoring logo in form | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'logo', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
| <commit_before># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
<commit_msg>Add monitoring logo in form<commit_after> | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'logo', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
| # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
Add monitoring logo in form# -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'logo', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
| <commit_before># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
<commit_msg>Add monitoring logo in form<commit_after># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import FormHorizontalMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from dal import autocomplete
from django import forms
from tinymce.widgets import TinyMCE
from django.utils.translation import ugettext_lazy as _
from ..institutions.models import Institution
from .models import Monitoring
from .utils import M2MFieldFormMixin
class MonitoringForm(UserKwargModelFormMixin, FormHorizontalMixin, SingleButtonMixin,
M2MFieldFormMixin, forms.ModelForm):
institutions = forms.ModelMultipleChoiceField(queryset=Institution.objects.all(),
label=_("Institutions"),
required=False,
widget=autocomplete.ModelSelect2Multiple(url='institutions:autocomplete'))
def __init__(self, *args, **kwargs):
super(MonitoringForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.fields['institutions'].initial = self.instance.institutions.all()
if not self.instance.user_id:
self.instance.user = self.user
def save(self, *args, **kwargs):
super(MonitoringForm, self).save(*args, **kwargs)
self.save_m2m_field(field='institutions',
left='monitoring',
right='institution')
return self.instance
class Meta:
model = Monitoring
fields = ['name', 'logo', 'description', 'active', 'max_point', ]
widgets = {
'description': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
|
994606d2641115f8af59657204d3d64f540bbfbd | data_structures/linked_list.py | data_structures/linked_list.py | class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
| class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
| Update magic methods, and reorg args. | Update magic methods, and reorg args.
| Python | mit | sjschmidt44/python_data_structures | class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
Update magic methods, and reorg args. | class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
| <commit_before>class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
<commit_msg>Update magic methods, and reorg args.<commit_after> | class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
| class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
Update magic methods, and reorg args.class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
| <commit_before>class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
<commit_msg>Update magic methods, and reorg args.<commit_after>class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
|
ad7bddb7fc4704893c0113bc48967ff3dd581e39 | demos/spritzer/settings.py | demos/spritzer/settings.py | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.CometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.BroadcastCometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 | Swap out CometHandler for BroadcastCometHandler in the Spritzer demo. | Swap out CometHandler for BroadcastCometHandler in the Spritzer demo.
| Python | bsd-3-clause | ericflo/hurricane,ericflo/hurricane | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.CometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200Swap out CometHandler for BroadcastCometHandler in the Spritzer demo. | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.BroadcastCometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 | <commit_before>import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.CometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200<commit_msg>Swap out CometHandler for BroadcastCometHandler in the Spritzer demo.<commit_after> | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.BroadcastCometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 | import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.CometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200Swap out CometHandler for BroadcastCometHandler in the Spritzer demo.import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.BroadcastCometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 | <commit_before>import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.CometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200<commit_msg>Swap out CometHandler for BroadcastCometHandler in the Spritzer demo.<commit_after>import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
HANDLERS = (
'spritzer.spritzer_handler.SpritzerHandler',
'hurricane.handlers.comet.BroadcastCometHandler',
)
APPLICATION_MANAGER = 'hurricane.managers.ipc'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
TWITTER_USERNAME = 'py_hurricane'
TWITTER_PASSWORD = 'djangoftw'
COMET_PORT = 8000
COMET_CACHE_SIZE = 200 |
7af339d68d31e402df3a70b6596927439de0f2aa | doc/mkapidoc.py | doc/mkapidoc.py | #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
| #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
| Hide Exscript.protocols.AbstractMethod from the API docs. | Hide Exscript.protocols.AbstractMethod from the API docs.
| Python | mit | maximumG/exscript,knipknap/exscript,maximumG/exscript,knipknap/exscript | #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
Hide Exscript.protocols.AbstractMethod from the API docs. | #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
| <commit_before>#!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
<commit_msg>Hide Exscript.protocols.AbstractMethod from the API docs.<commit_after> | #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
| #!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
Hide Exscript.protocols.AbstractMethod from the API docs.#!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
| <commit_before>#!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
<commit_msg>Hide Exscript.protocols.AbstractMethod from the API docs.<commit_after>#!/usr/bin/env python
# Generates the API documentation.
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.Interpreter',
'--exclude Exscript.helpers',
'--exclude Exscript.FunctionAction',
'--exclude Exscript.FooLib',
'--exclude Exscript.AccountManager',
'--exclude Exscript.stdlib',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.protocols.otp',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
f9730bbcb9c36c973f3eff431c3f39ff18dda666 | django/comicsite/templatetags/comic_filters.py | django/comicsite/templatetags/comic_filters.py | from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.modal_name, arg)
| from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
| Update for function name change in Django 1.8 | Update for function name change in Django 1.8
| Python | apache-2.0 | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.modal_name, arg)
Update for function name change in Django 1.8 | from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
| <commit_before>from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.modal_name, arg)
<commit_msg>Update for function name change in Django 1.8<commit_after> | from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
| from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.modal_name, arg)
Update for function name change in Django 1.8from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
| <commit_before>from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.modal_name, arg)
<commit_msg>Update for function name change in Django 1.8<commit_after>from django import template
register = template.Library()
"""
Copied these from django/contrib/admin/templates/templatetags/admin_urls.
These are utility functions for generating urls to admin pages.
I want to extend the standard /admin url to always include the current project,
designated by /site/<projectname>/admin.
"""
@register.filter
def project_admin_urlname(value, arg):
return 'projectadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
|
23edca2a2a87ca0d96becd92a0bf930cc6c33b6f | alltheitems/world.py | alltheitems/world.py | import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
| import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
| Fix API method names called by World.block_at | Fix API method names called by World.block_at
| Python | mit | wurstmineberg/alltheitems.wurstmineberg.de,wurstmineberg/alltheitems.wurstmineberg.de | import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
Fix API method names called by World.block_at | import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
| <commit_before>import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
<commit_msg>Fix API method names called by World.block_at<commit_after> | import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
| import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
Fix API method names called by World.block_atimport alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
| <commit_before>import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
<commit_msg>Fix API method names called by World.block_at<commit_after>import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
|
8a10dbe86f6ce02af1884fc9e68aa925003d9ad7 | pynder/session.py | pynder/session.py | from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
| from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
response = self._api.recs()
users = response['results'] if 'results' in response else []
return [models.Hopeful(u, self) for u in users]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
| Handle no nearby users gracefully. | Handle no nearby users gracefully.
| Python | mit | rforgione/pynder | from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
Handle no nearby users gracefully. | from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
response = self._api.recs()
users = response['results'] if 'results' in response else []
return [models.Hopeful(u, self) for u in users]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
| <commit_before>from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
<commit_msg>Handle no nearby users gracefully.<commit_after> | from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
response = self._api.recs()
users = response['results'] if 'results' in response else []
return [models.Hopeful(u, self) for u in users]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
| from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
Handle no nearby users gracefully.from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
response = self._api.recs()
users = response['results'] if 'results' in response else []
return [models.Hopeful(u, self) for u in users]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
| <commit_before>from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
<commit_msg>Handle no nearby users gracefully.<commit_after>from time import time
from datetime import timedelta
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token, proxies=None):
self._api = api.TinderAPI(proxies)
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
response = self._api.recs()
users = response['results'] if 'results' in response else []
return [models.Hopeful(u, self) for u in users]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
@property
def likes_remaining(self):
meta_dct = self._api.meta()
return meta_dct['rating']['likes_remaining']
@property
def can_like_in(self):
'''
Return the number of seconds before being allowed to issue likes
'''
meta_dct = self._api.meta()
now = int(time())
limited_until = meta_dct['rating'].get('rate_limited_until', now) # Milliseconds
return limited_until / 1000 - now
|
0383ee9511a4b002fbb3c00b3fc6812e8cc6bf1e | test/integration/fixture_server.py | test/integration/fixture_server.py | """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog.routing.decorator import Router
router = Router()
@router.route("/")
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route("/foo")
def foo_app(environ, start_response):
"""Foo application. Outputs 'foobar!'"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
| """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog import Router
router = Router()
@router.route('/')
def simple_app(environ, start_response):
"""
Simplest possible application object
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route('/foo')
def foo_app(environ, start_response):
"""
Foo application. Outputs 'foobar!'
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
@router.route('/environ')
def environ_app(environ, start_response):
"""
Outputs the full server environ passed in to the request
"""
response_body = ['%s: %s' % (key, value)
for key, value in sorted(environ.items())]
response_body = '\n'.join(response_body)
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
| Update fixture server with environ route. | Update fixture server with environ route.
Minor changes to docstrings and change in quotation usage.
Router imported from aragog, not routing.decorator.
| Python | apache-2.0 | bramwelt/aragog | """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog.routing.decorator import Router
router = Router()
@router.route("/")
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route("/foo")
def foo_app(environ, start_response):
"""Foo application. Outputs 'foobar!'"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
Update fixture server with environ route.
Minor changes to docstrings and change in quotation usage.
Router imported from aragog, not routing.decorator. | """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog import Router
router = Router()
@router.route('/')
def simple_app(environ, start_response):
"""
Simplest possible application object
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route('/foo')
def foo_app(environ, start_response):
"""
Foo application. Outputs 'foobar!'
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
@router.route('/environ')
def environ_app(environ, start_response):
"""
Outputs the full server environ passed in to the request
"""
response_body = ['%s: %s' % (key, value)
for key, value in sorted(environ.items())]
response_body = '\n'.join(response_body)
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
| <commit_before>"""
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog.routing.decorator import Router
router = Router()
@router.route("/")
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route("/foo")
def foo_app(environ, start_response):
"""Foo application. Outputs 'foobar!'"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
<commit_msg>Update fixture server with environ route.
Minor changes to docstrings and change in quotation usage.
Router imported from aragog, not routing.decorator.<commit_after> | """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog import Router
router = Router()
@router.route('/')
def simple_app(environ, start_response):
"""
Simplest possible application object
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route('/foo')
def foo_app(environ, start_response):
"""
Foo application. Outputs 'foobar!'
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
@router.route('/environ')
def environ_app(environ, start_response):
"""
Outputs the full server environ passed in to the request
"""
response_body = ['%s: %s' % (key, value)
for key, value in sorted(environ.items())]
response_body = '\n'.join(response_body)
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
| """
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog.routing.decorator import Router
router = Router()
@router.route("/")
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route("/foo")
def foo_app(environ, start_response):
"""Foo application. Outputs 'foobar!'"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
Update fixture server with environ route.
Minor changes to docstrings and change in quotation usage.
Router imported from aragog, not routing.decorator."""
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog import Router
router = Router()
@router.route('/')
def simple_app(environ, start_response):
"""
Simplest possible application object
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route('/foo')
def foo_app(environ, start_response):
"""
Foo application. Outputs 'foobar!'
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
@router.route('/environ')
def environ_app(environ, start_response):
"""
Outputs the full server environ passed in to the request
"""
response_body = ['%s: %s' % (key, value)
for key, value in sorted(environ.items())]
response_body = '\n'.join(response_body)
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
| <commit_before>"""
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog.routing.decorator import Router
router = Router()
@router.route("/")
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route("/foo")
def foo_app(environ, start_response):
"""Foo application. Outputs 'foobar!'"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
<commit_msg>Update fixture server with environ route.
Minor changes to docstrings and change in quotation usage.
Router imported from aragog, not routing.decorator.<commit_after>"""
Tests against the URL Router
"""
from wsgiref.simple_server import make_server
from aragog import Router
router = Router()
@router.route('/')
def simple_app(environ, start_response):
"""
Simplest possible application object
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["hello, world!\n"]
@router.route('/foo')
def foo_app(environ, start_response):
"""
Foo application. Outputs 'foobar!'
"""
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return ["foobar!\n"]
@router.route('/environ')
def environ_app(environ, start_response):
"""
Outputs the full server environ passed in to the request
"""
response_body = ['%s: %s' % (key, value)
for key, value in sorted(environ.items())]
response_body = '\n'.join(response_body)
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
if __name__ == "__main__":
httpd = make_server('', 8080, router)
print "Server started on 8080."
httpd.serve_forever()
|
d7ebf5c6db9b73133915aabb3dbd9c5b283f9982 | ooni/tests/test_trueheaders.py | ooni/tests/test_trueheaders.py | from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
| from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
| Fix unittest for true headers.. | Fix unittest for true headers..
| Python | bsd-2-clause | kdmurray91/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe | from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
Fix unittest for true headers.. | from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
| <commit_before>from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
<commit_msg>Fix unittest for true headers..<commit_after> | from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
| from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
Fix unittest for true headers..from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
| <commit_before>from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
<commit_msg>Fix unittest for true headers..<commit_after>from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
|
2536d58d1119bd2304f5c16f1109e42314595f65 | scripts/cat.py | scripts/cat.py | #!/usr/bin/env python
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
| #!/usr/bin/env python3
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
| Fix build on systems without a "python" executable | Fix build on systems without a "python" executable
This script is technically 2/3 compatible, but 3 is required to build anyway,
so whatever.
| Python | isc | drobilla/pugl,drobilla/pugl,drobilla/pugl | #!/usr/bin/env python
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
Fix build on systems without a "python" executable
This script is technically 2/3 compatible, but 3 is required to build anyway,
so whatever. | #!/usr/bin/env python3
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
| <commit_before>#!/usr/bin/env python
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
<commit_msg>Fix build on systems without a "python" executable
This script is technically 2/3 compatible, but 3 is required to build anyway,
so whatever.<commit_after> | #!/usr/bin/env python3
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
| #!/usr/bin/env python
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
Fix build on systems without a "python" executable
This script is technically 2/3 compatible, but 3 is required to build anyway,
so whatever.#!/usr/bin/env python3
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
| <commit_before>#!/usr/bin/env python
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
<commit_msg>Fix build on systems without a "python" executable
This script is technically 2/3 compatible, but 3 is required to build anyway,
so whatever.<commit_after>#!/usr/bin/env python3
# Copyright 2021 David Robillard <d@drobilla.net>
# SPDX-License-Identifier: ISC
import sys
for filename in sys.argv[1:]:
with open(filename, 'r') as f:
sys.stdout.write(f.read())
|
189353e4eb110facbabf9882e0af1ef16ced600f | openstack/tests/functional/network/v2/test_quota.py | openstack/tests/functional/network/v2/test_quota.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
PROJECT_NAME = 'project-' + uuid.uuid4().hex
PROJECT = None
@classmethod
def setUpClass(cls):
super(TestQuota, cls).setUpClass()
# Need a project to have a quota
cls.PROJECT = cls.conn.identity.create_project(name=cls.PROJECT_NAME)
@classmethod
def tearDownClass(cls):
cls.conn.identity.delete_project(cls.PROJECT.id)
def test_list(self):
qot = self.conn.network.quotas().next()
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
project_quota = self.conn.network.quotas().next()
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| Fix network quota test so it works on gate | Fix network quota test so it works on gate
The gate does not create quotas by default, but devstack does.
This test is not important enough to make work for the gate which
would probably require some reconfiguration, but it is nice to
have it for devstack.
Change-Id: I6618b5ee8c1dde7773b83e8ba97092f30d595e8a
Partial-bug: #1665495
| Python | apache-2.0 | stackforge/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk,briancurtin/python-openstacksdk,briancurtin/python-openstacksdk,dtroyer/python-openstacksdk | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
PROJECT_NAME = 'project-' + uuid.uuid4().hex
PROJECT = None
@classmethod
def setUpClass(cls):
super(TestQuota, cls).setUpClass()
# Need a project to have a quota
cls.PROJECT = cls.conn.identity.create_project(name=cls.PROJECT_NAME)
@classmethod
def tearDownClass(cls):
cls.conn.identity.delete_project(cls.PROJECT.id)
def test_list(self):
qot = self.conn.network.quotas().next()
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
project_quota = self.conn.network.quotas().next()
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
Fix network quota test so it works on gate
The gate does not create quotas by default, but devstack does.
This test is not important enough to make work for the gate which
would probably require some reconfiguration, but it is nice to
have it for devstack.
Change-Id: I6618b5ee8c1dde7773b83e8ba97092f30d595e8a
Partial-bug: #1665495 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
PROJECT_NAME = 'project-' + uuid.uuid4().hex
PROJECT = None
@classmethod
def setUpClass(cls):
super(TestQuota, cls).setUpClass()
# Need a project to have a quota
cls.PROJECT = cls.conn.identity.create_project(name=cls.PROJECT_NAME)
@classmethod
def tearDownClass(cls):
cls.conn.identity.delete_project(cls.PROJECT.id)
def test_list(self):
qot = self.conn.network.quotas().next()
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
project_quota = self.conn.network.quotas().next()
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
<commit_msg>Fix network quota test so it works on gate
The gate does not create quotas by default, but devstack does.
This test is not important enough to make work for the gate which
would probably require some reconfiguration, but it is nice to
have it for devstack.
Change-Id: I6618b5ee8c1dde7773b83e8ba97092f30d595e8a
Partial-bug: #1665495<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
PROJECT_NAME = 'project-' + uuid.uuid4().hex
PROJECT = None
@classmethod
def setUpClass(cls):
super(TestQuota, cls).setUpClass()
# Need a project to have a quota
cls.PROJECT = cls.conn.identity.create_project(name=cls.PROJECT_NAME)
@classmethod
def tearDownClass(cls):
cls.conn.identity.delete_project(cls.PROJECT.id)
def test_list(self):
qot = self.conn.network.quotas().next()
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
project_quota = self.conn.network.quotas().next()
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
Fix network quota test so it works on gate
The gate does not create quotas by default, but devstack does.
This test is not important enough to make work for the gate which
would probably require some reconfiguration, but it is nice to
have it for devstack.
Change-Id: I6618b5ee8c1dde7773b83e8ba97092f30d595e8a
Partial-bug: #1665495# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
PROJECT_NAME = 'project-' + uuid.uuid4().hex
PROJECT = None
@classmethod
def setUpClass(cls):
super(TestQuota, cls).setUpClass()
# Need a project to have a quota
cls.PROJECT = cls.conn.identity.create_project(name=cls.PROJECT_NAME)
@classmethod
def tearDownClass(cls):
cls.conn.identity.delete_project(cls.PROJECT.id)
def test_list(self):
qot = self.conn.network.quotas().next()
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
project_quota = self.conn.network.quotas().next()
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
<commit_msg>Fix network quota test so it works on gate
The gate does not create quotas by default, but devstack does.
This test is not important enough to make work for the gate which
would probably require some reconfiguration, but it is nice to
have it for devstack.
Change-Id: I6618b5ee8c1dde7773b83e8ba97092f30d595e8a
Partial-bug: #1665495<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
|
5c1fa71bf4d6dbe6fef0836a03f6b5d85e924f41 | src/urllib3/_version.py | src/urllib3/_version.py | # This file is protected via CODEOWNERS
__version__ = "1.25.9"
| # This file is protected via CODEOWNERS
__version__ = "1.26.0.dev0"
| Mark master branch as 1.26.0.dev0 for RTD | Mark master branch as 1.26.0.dev0 for RTD | Python | mit | sigmavirus24/urllib3,sigmavirus24/urllib3,urllib3/urllib3,urllib3/urllib3 | # This file is protected via CODEOWNERS
__version__ = "1.25.9"
Mark master branch as 1.26.0.dev0 for RTD | # This file is protected via CODEOWNERS
__version__ = "1.26.0.dev0"
| <commit_before># This file is protected via CODEOWNERS
__version__ = "1.25.9"
<commit_msg>Mark master branch as 1.26.0.dev0 for RTD<commit_after> | # This file is protected via CODEOWNERS
__version__ = "1.26.0.dev0"
| # This file is protected via CODEOWNERS
__version__ = "1.25.9"
Mark master branch as 1.26.0.dev0 for RTD# This file is protected via CODEOWNERS
__version__ = "1.26.0.dev0"
| <commit_before># This file is protected via CODEOWNERS
__version__ = "1.25.9"
<commit_msg>Mark master branch as 1.26.0.dev0 for RTD<commit_after># This file is protected via CODEOWNERS
__version__ = "1.26.0.dev0"
|
e5591918d9ec792c64a25670f7bf7fde87ac078d | espei/citing.py | espei/citing.py | """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
| """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
| Fix unicode in citation (again) | DOC: Fix unicode in citation (again)
| Python | mit | PhasesResearchLab/ESPEI | """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
DOC: Fix unicode in citation (again) | """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
| <commit_before>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
<commit_msg>DOC: Fix unicode in citation (again)<commit_after> | """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
| """
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
DOC: Fix unicode in citation (again)"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
| <commit_before>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
<commit_msg>DOC: Fix unicode in citation (again)<commit_after>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
|
fde67686d2bd685e31cfc0e156314476b057db78 | xudd/tests/test_demos.py | xudd/tests/test_demos.py | from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
| from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
| Add inter-hive communication lotsamessages test | Add inter-hive communication lotsamessages test
| Python | apache-2.0 | xudd/xudd | from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
Add inter-hive communication lotsamessages test | from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
| <commit_before>from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
<commit_msg>Add inter-hive communication lotsamessages test<commit_after> | from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
| from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
Add inter-hive communication lotsamessages testfrom xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
| <commit_before>from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
<commit_msg>Add inter-hive communication lotsamessages test<commit_after>from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
|
8b9ef7e731a8801535b9348dc0a6869de6c9ecfc | tests/__init__.py | tests/__init__.py | """
tests
~~~~~
Unit tests for the project.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
| """
tests
~~~~~
Tests for the library and tools.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
| Make the description of the tests package more precise. | Make the description of the tests package more precise.
| Python | mit | s3rvac/retdec-python | """
tests
~~~~~
Unit tests for the project.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
Make the description of the tests package more precise. | """
tests
~~~~~
Tests for the library and tools.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
| <commit_before>"""
tests
~~~~~
Unit tests for the project.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
<commit_msg>Make the description of the tests package more precise.<commit_after> | """
tests
~~~~~
Tests for the library and tools.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
| """
tests
~~~~~
Unit tests for the project.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
Make the description of the tests package more precise."""
tests
~~~~~
Tests for the library and tools.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
| <commit_before>"""
tests
~~~~~
Unit tests for the project.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
<commit_msg>Make the description of the tests package more precise.<commit_after>"""
tests
~~~~~
Tests for the library and tools.
:copyright: © 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: MIT, see the ``LICENSE`` file for more details
"""
|
1fa2af46d9f1ee05d4e4fd16869803c3dfff23e0 | tests/protocol/primitives_tests.py | tests/protocol/primitives_tests.py | import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '"u\'foobar\'"')
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
| import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '%r' % repr(u"foobar"))
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
| Fix repr test for py34. | Fix repr test for py34.
| Python | apache-2.0 | wglass/kiel | import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '"u\'foobar\'"')
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
Fix repr test for py34. | import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '%r' % repr(u"foobar"))
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
| <commit_before>import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '"u\'foobar\'"')
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
<commit_msg>Fix repr test for py34.<commit_after> | import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '%r' % repr(u"foobar"))
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
| import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '"u\'foobar\'"')
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
Fix repr test for py34.import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '%r' % repr(u"foobar"))
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
| <commit_before>import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '"u\'foobar\'"')
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
<commit_msg>Fix repr test for py34.<commit_after>import unittest
from kiel.protocol import primitives
class PrimitivesTests(unittest.TestCase):
def test_string_repr(self):
s = primitives.String(u"foobar")
self.assertEqual(repr(s), '%r' % repr(u"foobar"))
def test_array_repr(self):
a = primitives.Array.of(primitives.Int32)([1, 3, 6, 9])
self.assertEqual(repr(a), "[1, 3, 6, 9]")
|
6191a5afd390cbd7e892e73af959d8d4cd68f52b | moksha/widgets/iframe.py | moksha/widgets/iframe.py | # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
| # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url', 'title', 'height', 'width']
template = """
<h1>${title}</h1>
<iframe id="${id}" src="${url}" width="${width}" height="${height}">
<p>Your browser does not support iframes.</p>
</iframe>
"""
title = ''
height = width = '100%'
engine_name = 'mako'
iframe_widget = IFrameWidget('iframe_widget')
| Make our IFrameWidget more configurable | Make our IFrameWidget more configurable
| Python | apache-2.0 | lmacken/moksha,mokshaproject/moksha,mokshaproject/moksha,ralphbean/moksha,lmacken/moksha,ralphbean/moksha,lmacken/moksha,ralphbean/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,pombredanne/moksha,pombredanne/moksha | # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
Make our IFrameWidget more configurable | # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url', 'title', 'height', 'width']
template = """
<h1>${title}</h1>
<iframe id="${id}" src="${url}" width="${width}" height="${height}">
<p>Your browser does not support iframes.</p>
</iframe>
"""
title = ''
height = width = '100%'
engine_name = 'mako'
iframe_widget = IFrameWidget('iframe_widget')
| <commit_before># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
<commit_msg>Make our IFrameWidget more configurable<commit_after> | # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url', 'title', 'height', 'width']
template = """
<h1>${title}</h1>
<iframe id="${id}" src="${url}" width="${width}" height="${height}">
<p>Your browser does not support iframes.</p>
</iframe>
"""
title = ''
height = width = '100%'
engine_name = 'mako'
iframe_widget = IFrameWidget('iframe_widget')
| # This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
Make our IFrameWidget more configurable# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url', 'title', 'height', 'width']
template = """
<h1>${title}</h1>
<iframe id="${id}" src="${url}" width="${width}" height="${height}">
<p>Your browser does not support iframes.</p>
</iframe>
"""
title = ''
height = width = '100%'
engine_name = 'mako'
iframe_widget = IFrameWidget('iframe_widget')
| <commit_before># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
<commit_msg>Make our IFrameWidget more configurable<commit_after># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`moksha.widgets.iframe` - An IFrame Widget
===============================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url', 'title', 'height', 'width']
template = """
<h1>${title}</h1>
<iframe id="${id}" src="${url}" width="${width}" height="${height}">
<p>Your browser does not support iframes.</p>
</iframe>
"""
title = ''
height = width = '100%'
engine_name = 'mako'
iframe_widget = IFrameWidget('iframe_widget')
|
d8623cb31b463ef98fbca0288e34c6ae24df4c82 | statsmodels/sandbox/stats/tests/test_runs.py | statsmodels/sandbox/stats/tests/test_runs.py | """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
| """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
| Fix missing line at end of file | STYLE: Fix missing line at end of file
Fix missing line
Remove whitespace | Python | bsd-3-clause | statsmodels/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels | """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
STYLE: Fix missing line at end of file
Fix missing line
Remove whitespace | """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
| <commit_before>"""
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
<commit_msg>STYLE: Fix missing line at end of file
Fix missing line
Remove whitespace<commit_after> | """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
| """
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
STYLE: Fix missing line at end of file
Fix missing line
Remove whitespace"""
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
| <commit_before>"""
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
<commit_msg>STYLE: Fix missing line at end of file
Fix missing line
Remove whitespace<commit_after>"""
Tests corresponding to sandbox.stats.runs
"""
from numpy.testing import assert_almost_equal
from statsmodels.sandbox.stats.runs import runstest_1samp
def test_mean_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "mean"
expected = (-4.007095978613213, 6.146988816717466e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_median_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = "median"
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
def test_numeric_cutoff():
x = [1] * 5 + [2] * 6 + [3] * 8
cutoff = 2
expected = (-3.944254410803499, 8.004864125547193e-05)
results = runstest_1samp(x, cutoff=cutoff, correction=False)
assert_almost_equal(expected, results)
|
2f041e6ed7d07ef8932350b68581e8dfeaef903f | dashboard/dashboard/pinpoint/handlers/job.py | dashboard/dashboard/pinpoint/handlers/job.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
self.response.write(json.dumps({'data': job.AsDict()}))
except: # pylint: disable=bare-except
# There's no narrower exception we can catch. Catching
# google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# doesn't appear to work here.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
del job
| # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
except Exception as e: # pylint: disable=broad-except
# Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# directly doesn't work.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
raise
self.response.write(json.dumps({'data': job.AsDict()}))
| Move Job handler out of exception block. | [pinpoint] Move Job handler out of exception block.
The exception block is solely used for Job loading exceptions.
Review-Url: https://codereview.chromium.org/2768293003
| Python | bsd-3-clause | catapult-project/catapult,sahiljain/catapult,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult-csm | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
self.response.write(json.dumps({'data': job.AsDict()}))
except: # pylint: disable=bare-except
# There's no narrower exception we can catch. Catching
# google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# doesn't appear to work here.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
del job
[pinpoint] Move Job handler out of exception block.
The exception block is solely used for Job loading exceptions.
Review-Url: https://codereview.chromium.org/2768293003 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
except Exception as e: # pylint: disable=broad-except
# Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# directly doesn't work.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
raise
self.response.write(json.dumps({'data': job.AsDict()}))
| <commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
self.response.write(json.dumps({'data': job.AsDict()}))
except: # pylint: disable=bare-except
# There's no narrower exception we can catch. Catching
# google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# doesn't appear to work here.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
del job
<commit_msg>[pinpoint] Move Job handler out of exception block.
The exception block is solely used for Job loading exceptions.
Review-Url: https://codereview.chromium.org/2768293003<commit_after> | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
except Exception as e: # pylint: disable=broad-except
# Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# directly doesn't work.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
raise
self.response.write(json.dumps({'data': job.AsDict()}))
| # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
self.response.write(json.dumps({'data': job.AsDict()}))
except: # pylint: disable=bare-except
# There's no narrower exception we can catch. Catching
# google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# doesn't appear to work here.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
del job
[pinpoint] Move Job handler out of exception block.
The exception block is solely used for Job loading exceptions.
Review-Url: https://codereview.chromium.org/2768293003# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
except Exception as e: # pylint: disable=broad-except
# Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# directly doesn't work.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
raise
self.response.write(json.dumps({'data': job.AsDict()}))
| <commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
self.response.write(json.dumps({'data': job.AsDict()}))
except: # pylint: disable=bare-except
# There's no narrower exception we can catch. Catching
# google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# doesn't appear to work here.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
del job
<commit_msg>[pinpoint] Move Job handler out of exception block.
The exception block is solely used for Job loading exceptions.
Review-Url: https://codereview.chromium.org/2768293003<commit_after># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import webapp2
from dashboard.pinpoint.models import job as job_module
class JobHandler(webapp2.RequestHandler):
def post(self):
job_id = self.request.get('job_id')
# Validate parameters.
try:
job = job_module.JobFromId(job_id)
except Exception as e: # pylint: disable=broad-except
# Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError
# directly doesn't work.
# https://github.com/googlecloudplatform/datastore-ndb-python/issues/143
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
self.response.write(json.dumps({'error': 'Unknown job id.'}))
return
raise
self.response.write(json.dumps({'data': job.AsDict()}))
|
132d160a365580d77c1f5763b1fd1ac044133bb0 | NYTimesArticleAPI/__init__.py | NYTimesArticleAPI/__init__.py | from nytapi import *
__version__ = "1.0.0"
__author__ = "Matt Morrison @MattDMo mattdmo@mattdmo.com"
__all__ = ["articleAPI"]
| from search_api import *
__version__ = "1.0.0"
__author__ = "Matt Morrison (@MattDMo)"
__all__ = ["articleAPI"]
if __name__ == "__main__":
print("This module cannot be run on its own. Please use by running ",
"\"from NYTimesArticleAPI import articleAPI\"")
exit(0)
| Print message if module is run on its own | Print message if module is run on its own
| Python | mit | MattDMo/NYTimesArticleAPI | from nytapi import *
__version__ = "1.0.0"
__author__ = "Matt Morrison @MattDMo mattdmo@mattdmo.com"
__all__ = ["articleAPI"]
Print message if module is run on its own | from search_api import *
__version__ = "1.0.0"
__author__ = "Matt Morrison (@MattDMo)"
__all__ = ["articleAPI"]
if __name__ == "__main__":
print("This module cannot be run on its own. Please use by running ",
"\"from NYTimesArticleAPI import articleAPI\"")
exit(0)
| <commit_before>from nytapi import *
__version__ = "1.0.0"
__author__ = "Matt Morrison @MattDMo mattdmo@mattdmo.com"
__all__ = ["articleAPI"]
<commit_msg>Print message if module is run on its own<commit_after> | from search_api import *
__version__ = "1.0.0"
__author__ = "Matt Morrison (@MattDMo)"
__all__ = ["articleAPI"]
if __name__ == "__main__":
print("This module cannot be run on its own. Please use by running ",
"\"from NYTimesArticleAPI import articleAPI\"")
exit(0)
| from nytapi import *
__version__ = "1.0.0"
__author__ = "Matt Morrison @MattDMo mattdmo@mattdmo.com"
__all__ = ["articleAPI"]
Print message if module is run on its ownfrom search_api import *
__version__ = "1.0.0"
__author__ = "Matt Morrison (@MattDMo)"
__all__ = ["articleAPI"]
if __name__ == "__main__":
print("This module cannot be run on its own. Please use by running ",
"\"from NYTimesArticleAPI import articleAPI\"")
exit(0)
| <commit_before>from nytapi import *
__version__ = "1.0.0"
__author__ = "Matt Morrison @MattDMo mattdmo@mattdmo.com"
__all__ = ["articleAPI"]
<commit_msg>Print message if module is run on its own<commit_after>from search_api import *
__version__ = "1.0.0"
__author__ = "Matt Morrison (@MattDMo)"
__all__ = ["articleAPI"]
if __name__ == "__main__":
print("This module cannot be run on its own. Please use by running ",
"\"from NYTimesArticleAPI import articleAPI\"")
exit(0)
|
bf1cc589147429eb4cc125904c7c0690a6deaf1c | testsuite/N802.py | testsuite/N802.py | #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
| #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
| Add more tests around ignored names | Add more tests around ignored names
| Python | mit | flintwork/pep8-naming | #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
Add more tests around ignored names | #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
| <commit_before>#: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
<commit_msg>Add more tests around ignored names<commit_after> | #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
| #: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
Add more tests around ignored names#: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
| <commit_before>#: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
<commit_msg>Add more tests around ignored names<commit_after>#: Okay
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
|
36663add9f53da925f1d29c8c567ab30a1f33139 | tests/api_resources/checkout/test_session.py | tests/api_resources/checkout/test_session.py | from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
| from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
| Add support for retrieving a Checkout Session | Add support for retrieving a Checkout Session
| Python | mit | stripe/stripe-python | from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
Add support for retrieving a Checkout Session | from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
| <commit_before>from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
<commit_msg>Add support for retrieving a Checkout Session<commit_after> | from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
| from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
Add support for retrieving a Checkout Sessionfrom __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
| <commit_before>from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
<commit_msg>Add support for retrieving a Checkout Session<commit_after>from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "test@stripe.com"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
|
ce5b3402d9dc5bf69b96c45a810a987d6d4b4231 | tests/functional_tests/test_valid_recipes.py | tests/functional_tests/test_valid_recipes.py | import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
| import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
| Change exception from RecipeError to SystemExit | Change exception from RecipeError to SystemExit
| Python | bsd-3-clause | mandeep/conda-verify | import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
Change exception from RecipeError to SystemExit | import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
| <commit_before>import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
<commit_msg>Change exception from RecipeError to SystemExit<commit_after> | import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
| import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
Change exception from RecipeError to SystemExitimport os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
| <commit_before>import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
<commit_msg>Change exception from RecipeError to SystemExit<commit_after>import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
|
11feab5b49bf818e8dde90497d90dafc7ceb5183 | src/locations/models.py | src/locations/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| Order locations and districts by name | Order locations and districts by name
| Python | mit | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
Order locations and districts by name | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| <commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
<commit_msg>Order locations and districts by name<commit_after> | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
Order locations and districts by namefrom django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| <commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
<commit_msg>Order locations and districts by name<commit_after>from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
|
760af101c3b47fa4cf4aaeba5bc67aa94d2ba060 | src/Exscript/Interpreter/stdlib/IPv4/util.py | src/Exscript/Interpreter/stdlib/IPv4/util.py | import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
| import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('!L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('!L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
| Enforce endianess when converting IPs to long. | Enforce endianess when converting IPs to long.
git-svn-id: 21715c51dd601a1fb57681abbfe4e8ed6f4259bf@205 4c10cf09-d433-0410-9a0a-09c53010615a
| Python | mit | maximumG/exscript,knipknap/exscript,knipknap/exscript,maximumG/exscript | import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
Enforce endianess when converting IPs to long.
git-svn-id: 21715c51dd601a1fb57681abbfe4e8ed6f4259bf@205 4c10cf09-d433-0410-9a0a-09c53010615a | import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('!L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('!L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
| <commit_before>import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
<commit_msg>Enforce endianess when converting IPs to long.
git-svn-id: 21715c51dd601a1fb57681abbfe4e8ed6f4259bf@205 4c10cf09-d433-0410-9a0a-09c53010615a<commit_after> | import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('!L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('!L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
| import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
Enforce endianess when converting IPs to long.
git-svn-id: 21715c51dd601a1fb57681abbfe4e8ed6f4259bf@205 4c10cf09-d433-0410-9a0a-09c53010615aimport socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('!L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('!L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
| <commit_before>import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
<commit_msg>Enforce endianess when converting IPs to long.
git-svn-id: 21715c51dd601a1fb57681abbfe4e8ed6f4259bf@205 4c10cf09-d433-0410-9a0a-09c53010615a<commit_after>import socket, struct, math
def _least_bit(number):
for n in range(0, 31):
if number & (0x00000001l << n) != 0:
return n + 1
return 0
def _highest_bit(number):
if number == 0:
return 0
number -= 1
number |= number >> 1
number |= number >> 2
number |= number >> 4
number |= number >> 8
number |= number >> 16
number += 1
return math.sqrt(number)
def ip2int(ip):
if ip == '255.255.255.255':
return 0xFFFFFFFFl
return struct.unpack('!L', socket.inet_aton(ip))[0]
def int2ip(number):
return socket.inet_ntoa(struct.pack('!L', number))
def pfxlen2mask(pfxlen):
return 0xFFFFFFFFl << (32 - int(pfxlen))
def mask2pfxlen(mask):
mask_int = ip2int(mask)
return 33 - _least_bit(mask_int)
def parse_prefix(prefix, default_mask = 24):
if '/' in prefix:
(network, pfxlen) = prefix.split('/')
else:
network = prefix
pfxlen = default_mask
return (ip2int(network), int(pfxlen))
|
45add3b1d96022244b372fe07d6f6dceab23786d | councilmatic_core/management/commands/update_headshots.py | councilmatic_core/management/commands/update_headshots.py | from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
| from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = 'ocd-person-' + councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
| Add prefix to headshot filenames for easy exclusion from gitignore | Add prefix to headshot filenames for easy exclusion from gitignore
| Python | mit | datamade/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic | from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
Add prefix to headshot filenames for easy exclusion from gitignore | from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = 'ocd-person-' + councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
<commit_msg>Add prefix to headshot filenames for easy exclusion from gitignore<commit_after> | from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = 'ocd-person-' + councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
| from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
Add prefix to headshot filenames for easy exclusion from gitignorefrom django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = 'ocd-person-' + councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
<commit_msg>Add prefix to headshot filenames for easy exclusion from gitignore<commit_after>from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from opencivicdata.core.models import Person as OCDPerson
import requests
class Command(BaseCommand):
help = 'Attach headshots to councilmembers'
def handle(self, *args, **opetions):
for person in OCDPerson.objects.exclude(image=''):
councilmatic_person = person.councilmatic_person
filename = 'ocd-person-' + councilmatic_person.slug + '.jpg'
response = requests.get(person.image)
print(person.image)
with open('/tmp/' + filename, 'wb') as f:
f.write(response.content)
with open('/tmp/' + filename, 'rb') as f:
django_file = File(f)
councilmatic_person.headshot.save(filename, django_file)
|
d07a009d28f3ea17558fd867817f3b19ad93ddfe | lobster/commands/configure.py | lobster/commands/configure.py | import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command)
except Exception as e:
logger.error("can't change values: {}".format(e))
| import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command + '\n')
except Exception as e:
logger.error("can't change values: {}".format(e))
| Send a newline with configuration commands. | Send a newline with configuration commands.
| Python | mit | matz-e/lobster,matz-e/lobster,matz-e/lobster | import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command)
except Exception as e:
logger.error("can't change values: {}".format(e))
Send a newline with configuration commands. | import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command + '\n')
except Exception as e:
logger.error("can't change values: {}".format(e))
| <commit_before>import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command)
except Exception as e:
logger.error("can't change values: {}".format(e))
<commit_msg>Send a newline with configuration commands.<commit_after> | import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command + '\n')
except Exception as e:
logger.error("can't change values: {}".format(e))
| import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command)
except Exception as e:
logger.error("can't change values: {}".format(e))
Send a newline with configuration commands.import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command + '\n')
except Exception as e:
logger.error("can't change values: {}".format(e))
| <commit_before>import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command)
except Exception as e:
logger.error("can't change values: {}".format(e))
<commit_msg>Send a newline with configuration commands.<commit_after>import logging
import os
from lobster import util
from lobster.core.command import Command
from lockfile import AlreadyLocked
class Configure(Command):
@property
def help(self):
return 'change the configuration of a running lobster process'
def setup(self, argparser):
argparser.add_argument('command', help='a python expression to change a mutable configuration setting')
def run(self, args):
logger = logging.getLogger('lobster.configure')
try:
pidfile = util.get_lock(args.config.workdir)
logger.info("Lobster process not running, directly changing configuration.")
with util.PartiallyMutable.lockdown():
exec args.command in {'config': args.config, 'storage': args.config.storage}, {}
args.config.save()
except AlreadyLocked:
logger.info("Lobster process still running, contacting process...")
logger.info("sending command: " + args.command)
logger.info("check the log of the main process for success")
icp = open(os.path.join(args.config.workdir, 'ipc'), 'w')
icp.write(args.command + '\n')
except Exception as e:
logger.error("can't change values: {}".format(e))
|
98307aec0d3182e3e461bd1ed287b75b26ae6e36 | migrations/0013_update_counter_options.py | migrations/0013_update_counter_options.py | import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
| import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
| Make the counter migration safer. | Make the counter migration safer. | Python | bsd-2-clause | amino-data/redash,rockwotj/redash,getredash/redash,jmvasquez/redashtest,hudl/redash,pubnative/redash,getredash/redash,vishesh92/redash,stefanseifert/redash,alexanderlz/redash,getredash/redash,stefanseifert/redash,denisov-vlad/redash,ninneko/redash,crowdworks/redash,denisov-vlad/redash,easytaxibr/redash,amino-data/redash,easytaxibr/redash,44px/redash,hudl/redash,getredash/redash,M32Media/redash,stefanseifert/redash,moritz9/redash,denisov-vlad/redash,akariv/redash,akariv/redash,denisov-vlad/redash,moritz9/redash,ninneko/redash,jmvasquez/redashtest,useabode/redash,rockwotj/redash,pubnative/redash,hudl/redash,imsally/redash,EverlyWell/redash,ninneko/redash,M32Media/redash,M32Media/redash,crowdworks/redash,imsally/redash,rockwotj/redash,vishesh92/redash,chriszs/redash,chriszs/redash,useabode/redash,amino-data/redash,easytaxibr/redash,useabode/redash,ninneko/redash,moritz9/redash,chriszs/redash,hudl/redash,jmvasquez/redashtest,M32Media/redash,rockwotj/redash,alexanderlz/redash,easytaxibr/redash,crowdworks/redash,EverlyWell/redash,EverlyWell/redash,vishesh92/redash,guaguadev/redash,44px/redash,pubnative/redash,pubnative/redash,guaguadev/redash,jmvasquez/redashtest,44px/redash,imsally/redash,pubnative/redash,vishesh92/redash,ninneko/redash,EverlyWell/redash,crowdworks/redash,imsally/redash,akariv/redash,guaguadev/redash,guaguadev/redash,stefanseifert/redash,44px/redash,amino-data/redash,easytaxibr/redash,moritz9/redash,denisov-vlad/redash,stefanseifert/redash,akariv/redash,akariv/redash,useabode/redash,chriszs/redash,getredash/redash,alexanderlz/redash,alexanderlz/redash,jmvasquez/redashtest,guaguadev/redash | import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
Make the counter migration safer. | import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
| <commit_before>import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
<commit_msg>Make the counter migration safer.<commit_after> | import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
| import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
Make the counter migration safer.import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
| <commit_before>import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
<commit_msg>Make the counter migration safer.<commit_after>import json
from redash import models
if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
options = json.loads(vis.options)
print "Before: ", options
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
else:
options['rowNumber'] = 1
if 'counterColName' not in options:
options['counterColName'] = 'counter'
if 'targetColName' not in options:
options['targetColName'] = 'target'
options['targetRowNumber'] = options['rowNumber']
print "After: ", options
vis.options = json.dumps(options)
vis.save()
|
deb66ebeca0b39c7ce62fc95b8a01bf973739e86 | rosidl_generator_py/rosidl_generator_py/__init__.py | rosidl_generator_py/rosidl_generator_py/__init__.py | # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
| # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
assert generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
| Fix pyflakes complaining of imported but unused module. | Fix pyflakes complaining of imported but unused module.
| Python | apache-2.0 | esteve/rosidl,ros2/rosidl_typesupport,ros2/rosidl_typesupport,esteve/rosidl,esteve/rosidl_typesupport,esteve/rosidl_typesupport,esteve/rosidl_typesupport,ros2/rosidl,ros2/rosidl,ros2/rosidl,ros2/rosidl_typesupport,esteve/rosidl,ros2/rosidl | # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
Fix pyflakes complaining of imported but unused module. | # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
assert generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
| <commit_before># Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
<commit_msg>Fix pyflakes complaining of imported but unused module.<commit_after> | # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
assert generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
| # Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
Fix pyflakes complaining of imported but unused module.# Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
assert generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
| <commit_before># Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
<commit_msg>Fix pyflakes complaining of imported but unused module.<commit_after># Copyright 2014-2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from .import_type_support_impl import import_type_support
__all__ = ['import_type_support']
try:
from .generate_py_impl import generate_py
assert generate_py
__all__.append('generate_py')
except ImportError:
logger = logging.getLogger('rosidl_generator_py')
logger.debug(
'Failed to import modules for generating Python structures:\n' + traceback.format_exc())
|
fab1a05fbeaf082432bd9f05b1fa721519838aff | ObjectTracking/DisplayIPVideoStream.py | ObjectTracking/DisplayIPVideoStream.py | import cv2
cv2.namedWindow("preview")
vc = cv2.VideoCapture(0)
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
if key == 27: # exit on ESC
break
| #!/usr/bin/python
# coding=utf-8
import cv2
import cv2.cv as cv
import datetime
cv2.namedWindow("preview")
# Video feed created with android ip webcam program
vc = cv2.VideoCapture("http://192.168.1.2:8080/videofeed?something.mjpeg")
# Save output video
width, height = 640, 480
writer = cv2.VideoWriter(filename="outputVideo.avi",
fourcc=cv.CV_FOURCC('M','J', 'P', 'G'),
fps=15,
frameSize=(width, height))
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
key -= 0x100000 # Corrects bug in openCV...
if key == 27: # exit on ESC
break
elif key==115: # s key for snapshot
cv2.imwrite(datetime.datetime.utcnow().strftime("%Yy%mm%dd%Hh%Mm%Ss")+'.jpg', frame)
writer.write(frame)
cv2.destroyAllWindows()
| Add snapshot feature on s key press Add recording feature | Add snapshot feature on s key press
Add recording feature
| Python | mit | baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite | import cv2
cv2.namedWindow("preview")
vc = cv2.VideoCapture(0)
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
if key == 27: # exit on ESC
break
Add snapshot feature on s key press
Add recording feature | #!/usr/bin/python
# coding=utf-8
import cv2
import cv2.cv as cv
import datetime
cv2.namedWindow("preview")
# Video feed created with android ip webcam program
vc = cv2.VideoCapture("http://192.168.1.2:8080/videofeed?something.mjpeg")
# Save output video
width, height = 640, 480
writer = cv2.VideoWriter(filename="outputVideo.avi",
fourcc=cv.CV_FOURCC('M','J', 'P', 'G'),
fps=15,
frameSize=(width, height))
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
key -= 0x100000 # Corrects bug in openCV...
if key == 27: # exit on ESC
break
elif key==115: # s key for snapshot
cv2.imwrite(datetime.datetime.utcnow().strftime("%Yy%mm%dd%Hh%Mm%Ss")+'.jpg', frame)
writer.write(frame)
cv2.destroyAllWindows()
| <commit_before>import cv2
cv2.namedWindow("preview")
vc = cv2.VideoCapture(0)
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
if key == 27: # exit on ESC
break
<commit_msg>Add snapshot feature on s key press
Add recording feature<commit_after> | #!/usr/bin/python
# coding=utf-8
import cv2
import cv2.cv as cv
import datetime
cv2.namedWindow("preview")
# Video feed created with android ip webcam program
vc = cv2.VideoCapture("http://192.168.1.2:8080/videofeed?something.mjpeg")
# Save output video
width, height = 640, 480
writer = cv2.VideoWriter(filename="outputVideo.avi",
fourcc=cv.CV_FOURCC('M','J', 'P', 'G'),
fps=15,
frameSize=(width, height))
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
key -= 0x100000 # Corrects bug in openCV...
if key == 27: # exit on ESC
break
elif key==115: # s key for snapshot
cv2.imwrite(datetime.datetime.utcnow().strftime("%Yy%mm%dd%Hh%Mm%Ss")+'.jpg', frame)
writer.write(frame)
cv2.destroyAllWindows()
| import cv2
cv2.namedWindow("preview")
vc = cv2.VideoCapture(0)
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
if key == 27: # exit on ESC
break
Add snapshot feature on s key press
Add recording feature#!/usr/bin/python
# coding=utf-8
import cv2
import cv2.cv as cv
import datetime
cv2.namedWindow("preview")
# Video feed created with android ip webcam program
vc = cv2.VideoCapture("http://192.168.1.2:8080/videofeed?something.mjpeg")
# Save output video
width, height = 640, 480
writer = cv2.VideoWriter(filename="outputVideo.avi",
fourcc=cv.CV_FOURCC('M','J', 'P', 'G'),
fps=15,
frameSize=(width, height))
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
key -= 0x100000 # Corrects bug in openCV...
if key == 27: # exit on ESC
break
elif key==115: # s key for snapshot
cv2.imwrite(datetime.datetime.utcnow().strftime("%Yy%mm%dd%Hh%Mm%Ss")+'.jpg', frame)
writer.write(frame)
cv2.destroyAllWindows()
| <commit_before>import cv2
cv2.namedWindow("preview")
vc = cv2.VideoCapture(0)
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
if key == 27: # exit on ESC
break
<commit_msg>Add snapshot feature on s key press
Add recording feature<commit_after>#!/usr/bin/python
# coding=utf-8
import cv2
import cv2.cv as cv
import datetime
cv2.namedWindow("preview")
# Video feed created with android ip webcam program
vc = cv2.VideoCapture("http://192.168.1.2:8080/videofeed?something.mjpeg")
# Save output video
width, height = 640, 480
writer = cv2.VideoWriter(filename="outputVideo.avi",
fourcc=cv.CV_FOURCC('M','J', 'P', 'G'),
fps=15,
frameSize=(width, height))
if vc.isOpened(): # try to get the first frame
rval, frame = vc.read()
else:
rval = False
while rval:
cv2.imshow("preview", frame)
rval, frame = vc.read()
key = cv2.waitKey(20)
key -= 0x100000 # Corrects bug in openCV...
if key == 27: # exit on ESC
break
elif key==115: # s key for snapshot
cv2.imwrite(datetime.datetime.utcnow().strftime("%Yy%mm%dd%Hh%Mm%Ss")+'.jpg', frame)
writer.write(frame)
cv2.destroyAllWindows()
|
41854e9dbed5780359659f6717f16e08caecb8e8 | diss/__init__.py | diss/__init__.py |
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': filepath,
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
|
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
| Fix filename instead of path in meta | Fix filename instead of path in meta
| Python | agpl-3.0 | hoh/Billabong,hoh/Billabong |
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': filepath,
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
Fix filename instead of path in meta |
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
| <commit_before>
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': filepath,
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
<commit_msg>Fix filename instead of path in meta<commit_after> |
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
|
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': filepath,
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
Fix filename instead of path in meta
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
| <commit_before>
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': filepath,
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
<commit_msg>Fix filename instead of path in meta<commit_after>
import os
import hashlib
import magic
from datetime import datetime
from .settings import METADATA_PATH
from .meta import get_meta
from .encryption import copy_and_encrypt, decrypt_blob
from .utils import dumps
hashing = hashlib.sha256
def save_metadata(meta):
destination = os.path.join(METADATA_PATH, meta['id'] + '.json')
open(destination, 'w').write(dumps(meta))
def add_file(filepath):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
key = b'0'*32
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
# TODO: replace by a hash of the encrypted file
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': key,
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
save_metadata(meta)
return meta
def get_content(id_):
key = get_meta(id_)['key']
return decrypt_blob(id_, key)
|
6bcf987ac927c4cd9829b55ec2521d77fcc2c3ad | examples/test_mfa_login.py | examples/test_mfa_login.py | from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
| from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.click('a:contains("This Page")')
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
| Add a click() call to an example test | Add a click() call to an example test
| Python | mit | mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
Add a click() call to an example test | from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.click('a:contains("This Page")')
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
| <commit_before>from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
<commit_msg>Add a click() call to an example test<commit_after> | from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.click('a:contains("This Page")')
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
| from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
Add a click() call to an example testfrom seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.click('a:contains("This Page")')
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
| <commit_before>from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
<commit_msg>Add a click() call to an example test<commit_after>from seleniumbase import BaseCase
class TestMFALogin(BaseCase):
def test_mfa_login(self):
self.open("https://seleniumbase.io/realworld/login")
self.type("#username", "demo_user")
self.type("#password", "secret_pass")
self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG")
self.highlight("img#image1")
self.click('a:contains("This Page")')
self.assert_text("Welcome!", "h1")
self.save_screenshot_to_logs()
|
f9648e4b48d2affee103ad5f229492254e3e4dc8 | web3/web3/jsonrpc.py | web3/web3/jsonrpc.py | class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for] | import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for] | Move message id generation to requestmanager | Move message id generation to requestmanager
| Python | mit | pipermerriam/web3.py,shravan-shandilya/web3.py | class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]Move message id generation to requestmanager | import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for] | <commit_before>class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]<commit_msg>Move message id generation to requestmanager<commit_after> | import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for] | class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]Move message id generation to requestmanagerimport json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for] | <commit_before>class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]<commit_msg>Move message id generation to requestmanager<commit_after>import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to valid json create payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for] |
d9f388d2b486da3bd5e3209db70d3e691aec584d | clowder/clowder/cli/yaml_controller.py | clowder/clowder/cli/yaml_controller.py | from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
| from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
| Add `clowder yaml` logic to Cement controller | Add `clowder yaml` logic to Cement controller
| Python | mit | JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder | from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder yaml` logic to Cement controller | from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
| <commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder yaml` logic to Cement controller<commit_after> | from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
| from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder yaml` logic to Cement controllerfrom __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
| <commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder yaml` logic to Cement controller<commit_after>from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
|
398718e615cab79066779cb19c2023062bc36110 | contrib/core/actions/inject_trigger.py | contrib/core/actions/inject_trigger.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
"""
NOTE: Server where this action run needs to have access to the database.
That's always the case right now, but if this assertion changes in the future, we should move
to utilizing the API for dispatching a trigger.
"""
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the API
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the /webhooks/st2 API endpoint
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
| Remove out of date comment. | Remove out of date comment.
| Python | apache-2.0 | Plexxi/st2,Plexxi/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,nzlosh/st2 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
"""
NOTE: Server where this action run needs to have access to the database.
That's always the case right now, but if this assertion changes in the future, we should move
to utilizing the API for dispatching a trigger.
"""
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the API
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
Remove out of date comment. | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the /webhooks/st2 API endpoint
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
| <commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
"""
NOTE: Server where this action run needs to have access to the database.
That's always the case right now, but if this assertion changes in the future, we should move
to utilizing the API for dispatching a trigger.
"""
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the API
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
<commit_msg>Remove out of date comment.<commit_after> | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the /webhooks/st2 API endpoint
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
"""
NOTE: Server where this action run needs to have access to the database.
That's always the case right now, but if this assertion changes in the future, we should move
to utilizing the API for dispatching a trigger.
"""
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the API
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
Remove out of date comment.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the /webhooks/st2 API endpoint
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
| <commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
"""
NOTE: Server where this action run needs to have access to the database.
That's always the case right now, but if this assertion changes in the future, we should move
to utilizing the API for dispatching a trigger.
"""
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the API
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
<commit_msg>Remove out of date comment.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.runners.base_action import Action
__all__ = [
'InjectTriggerAction'
]
class InjectTriggerAction(Action):
def run(self, trigger, payload=None, trace_tag=None):
payload = payload or {}
datastore_service = self.action_service.datastore_service
client = datastore_service.get_api_client()
# Dispatch the trigger using the /webhooks/st2 API endpoint
self.logger.debug('Injecting trigger "%s" with payload="%s"' % (trigger, str(payload)))
result = client.webhooks.post_generic_webhook(trigger=trigger, payload=payload,
trace_tag=trace_tag)
return result
|
7843ba68582c7da1312d081c0b741d57852d718f | test/utils/filesystem/name_sanitizer_spec.py | test/utils/filesystem/name_sanitizer_spec.py | from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=255, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
| from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=254, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
| Update the maximum allowed length for file name in sanitizer test | Update the maximum allowed length for file name in sanitizer test
| Python | mit | Stvad/CrowdAnki,Stvad/CrowdAnki,Stvad/CrowdAnki | from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=255, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
Update the maximum allowed length for file name in sanitizer test | from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=254, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
| <commit_before>from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=255, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
<commit_msg>Update the maximum allowed length for file name in sanitizer test<commit_after> | from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=254, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
| from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=255, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
Update the maximum allowed length for file name in sanitizer testfrom tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=254, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
| <commit_before>from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=255, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
<commit_msg>Update the maximum allowed length for file name in sanitizer test<commit_after>from tempfile import TemporaryDirectory
from expects import expect
from hypothesis import given, assume, example
from hypothesis.strategies import text, characters
from mamba import description, it
from pathlib import Path
from crowd_anki.utils.filesystem.name_sanitizer import sanitize_anki_deck_name, \
invalid_filename_chars
from test_utils.matchers import contain_any
with description("AnkiDeckNameSanitizer"):
with it("should remove all bad characters from the string"):
expect(sanitize_anki_deck_name(invalid_filename_chars)) \
.not_to(contain_any(*invalid_filename_chars))
with it("should be possible to create a file name from a random sanitized string"):
@given(text(characters(min_codepoint=1, max_codepoint=800), max_size=254, min_size=1))
@example("line\n another one")
def can_create(potential_name):
assume(potential_name not in ('.', '..'))
with TemporaryDirectory() as dir_name:
Path(dir_name).joinpath(sanitize_anki_deck_name(potential_name)).mkdir()
can_create()
|
5945b27aa6b5ae43470738dd6638ffa4617f7be1 | poradnia/users/migrations/0014_auto_20170317_1927.py | poradnia/users/migrations/0014_auto_20170317_1927.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| Fix backward compatibility of migrations | Fix backward compatibility of migrations
| Python | mit | watchdogpolska/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
Fix backward compatibility of migrations | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
<commit_msg>Fix backward compatibility of migrations<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
Fix backward compatibility of migrations# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
<commit_msg>Fix backward compatibility of migrations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
|
32d95efb4549a9cb3b3e6780efb292705de57713 | pronto_praise/pronto_praise/settings/heroku.py | pronto_praise/pronto_praise/settings/heroku.py | import dj_database_url
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
| import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
| Revert "Turn on DEBUG mode" | Revert "Turn on DEBUG mode"
This reverts commit 1c0e3251c3b502375a9738508f20a18ed58532ec.
| Python | mit | prontotools/pronto-praise,prontotools/pronto-praise,prontotools/pronto-praise,prontotools/pronto-praise | import dj_database_url
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
Revert "Turn on DEBUG mode"
This reverts commit 1c0e3251c3b502375a9738508f20a18ed58532ec. | import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
| <commit_before>import dj_database_url
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
<commit_msg>Revert "Turn on DEBUG mode"
This reverts commit 1c0e3251c3b502375a9738508f20a18ed58532ec.<commit_after> | import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
| import dj_database_url
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
Revert "Turn on DEBUG mode"
This reverts commit 1c0e3251c3b502375a9738508f20a18ed58532ec.import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
| <commit_before>import dj_database_url
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
<commit_msg>Revert "Turn on DEBUG mode"
This reverts commit 1c0e3251c3b502375a9738508f20a18ed58532ec.<commit_after>import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
74466e4bdce2a865718701b1bcccb2c884eac6ab | wsgi.py | wsgi.py | #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.DEBUG
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
| #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.INFO
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
| Reduce log level on OpenShift | Reduce log level on OpenShift
| Python | mit | sYnfo/portingdb,ari3s/portingdb,sYnfo/portingdb,fedora-python/portingdb,irushchyshyn/portingdb,irushchyshyn/portingdb,irushchyshyn/portingdb,ari3s/portingdb,irushchyshyn/portingdb,ari3s/portingdb,fedora-python/portingdb,fedora-python/portingdb,sYnfo/portingdb,ari3s/portingdb | #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.DEBUG
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
Reduce log level on OpenShift | #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.INFO
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
| <commit_before>#!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.DEBUG
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
<commit_msg>Reduce log level on OpenShift<commit_after> | #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.INFO
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
| #!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.DEBUG
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
Reduce log level on OpenShift#!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.INFO
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
| <commit_before>#!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.DEBUG
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
<commit_msg>Reduce log level on OpenShift<commit_after>#!/usr/bin/env python3
import os
import logging
from sqlalchemy import create_engine
from portingdb import load
from portingdb import htmlreport
level = logging.INFO
logging.basicConfig(level=level)
logging.getLogger('sqlalchemy.engine').setLevel(level)
sqlite_path = os.path.join(os.environ['OPENSHIFT_TMP_DIR'], 'portingdb.sqlite')
db_url = 'sqlite:///' + sqlite_path
application = htmlreport.create_app(db_url=db_url)
import pprint
pprint.pprint(os.environ)
# For testing only
if __name__ == '__main__':
from wsgiref.simple_server import make_server
httpd = make_server('localhost', 8051, application)
# Wait for a single request, serve it and quit.
httpd.handle_request()
|
8f1f15dc66c357f2ace070c449dd5e407b1e9f33 | x10d.py | x10d.py | #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = "/dev/ttyACM1"
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
| #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
| Add closing and argument for serial port | Add closing and argument for serial port
| Python | unlicense | umbc-hackafe/x10-controller | #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = "/dev/ttyACM1"
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
Add closing and argument for serial port | #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
| <commit_before>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = "/dev/ttyACM1"
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
<commit_msg>Add closing and argument for serial port<commit_after> | #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
| #!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = "/dev/ttyACM1"
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
Add closing and argument for serial port#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
| <commit_before>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = "/dev/ttyACM1"
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
<commit_msg>Add closing and argument for serial port<commit_after>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
from threading import Thread
import sys
def callback(event):
print("EVENT: {0.house}{0.unit}: {0.command}".format(event))
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
daemon_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
444a66b0b0da31ed4febea2dcd82fbf6d12ea107 | examples/deploy_local_file_resource.py | examples/deploy_local_file_resource.py | """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
| """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
application = None
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
except Exception as e:
print(e)
if application:
await application.remove()
await model.disconnect()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
| Make sure we cleanup even if fails in example | Make sure we cleanup even if fails in example
| Python | apache-2.0 | juju/python-libjuju,juju/python-libjuju | """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
Make sure we cleanup even if fails in example | """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
application = None
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
except Exception as e:
print(e)
if application:
await application.remove()
await model.disconnect()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
| <commit_before>"""
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
<commit_msg>Make sure we cleanup even if fails in example<commit_after> | """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
application = None
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
except Exception as e:
print(e)
if application:
await application.remove()
await model.disconnect()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
| """
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
Make sure we cleanup even if fails in example"""
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
application = None
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
except Exception as e:
print(e)
if application:
await application.remove()
await model.disconnect()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
| <commit_before>"""
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
<commit_msg>Make sure we cleanup even if fails in example<commit_after>"""
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
model = Model()
print('Connecting to model')
# connect to current model with current user, per Juju CLI
await model.connect()
application = None
try:
print('Deploying local-charm')
base_dir = Path(__file__).absolute().parent.parent
charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir)
resources = {"file-res": "test.file"}
application = await model.deploy(
charm_path,
resources=resources,
)
print('Waiting for active')
await model.wait_for_idle()
print('Removing Charm')
await application.remove()
except Exception as e:
print(e)
if application:
await application.remove()
await model.disconnect()
finally:
print('Disconnecting from model')
await model.disconnect()
if __name__ == '__main__':
jasyncio.run(main())
|
bea7fb4d47bf5cc87edcf1deff155b694e824295 | webapp/byceps/blueprints/board/formatting.py | webapp/byceps/blueprints/board/formatting.py | # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from cgi import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
| # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from html import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
| Use `html.escape` instead of the deprecated `cgi.escape`. | Use `html.escape` instead of the deprecated `cgi.escape`.
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps | # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from cgi import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
Use `html.escape` instead of the deprecated `cgi.escape`. | # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from html import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
| <commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from cgi import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
<commit_msg>Use `html.escape` instead of the deprecated `cgi.escape`.<commit_after> | # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from html import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
| # -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from cgi import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
Use `html.escape` instead of the deprecated `cgi.escape`.# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from html import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
    """Create a customized BBcode parser.

    The returned :class:`bbcode.Parser` renders ``[img]`` and ``[quote]``
    tags with project-specific HTML; all other behavior is the library
    default apart from cosmetic replacements being disabled.
    """
    # replace_cosmetic=False keeps typographic substitutions (e.g. (c))
    # from rewriting the user's text.
    parser = bbcode.Parser(replace_cosmetic=False)

    # Replace image tags.
    def render_image(name, value, options, parent, context):
        # value is the URL between [img]...[/img].
        return '<img src="{}"/>'.format(value)

    # replace_links=False stops the parser from auto-linkifying the URL
    # before it is inserted into the <img> tag.
    parser.add_formatter('img', render_image, replace_links=False)

    # Render quotes with optional author.
    def render_quote(name, value, options, parent, context):
        intro = ''
        if 'author' in options:
            # Escape the user-supplied author name to avoid HTML injection.
            author = escape(options['author'])
            # "schrieb" = German for "wrote"; user-facing text, kept as-is.
            intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
                .format(author)
        return '{}<blockquote>{}</blockquote>'.format(intro, value)

    # strip=True trims whitespace around the quoted body.
    parser.add_formatter('quote', render_quote, strip=True)

    return parser


# Module-level singleton, built once at import time and shared by
# render_html().
PARSER = create_parser()
def render_html(value):
    """Render text as HTML, interpreting BBcode.

    BBcode tags are expanded by the module-level parser first, then
    smiley shortcodes are substituted in the resulting markup.
    """
    return replace_smileys(PARSER.format(value))
| <commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from cgi import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
<commit_msg>Use `html.escape` instead of the deprecated `cgi.escape`.<commit_after># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2015 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from html import escape
import bbcode
try:
from .smileys import get_smileys
except ImportError:
get_smileys = lambda: []
try:
from .smileys import replace_smileys
except ImportError:
replace_smileys = lambda x: x
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = escape(options['author'])
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
html = PARSER.format(value)
html = replace_smileys(html)
return html
|
5acee7067df2af2b351bfb4b5757b4d53f023d32 | radio/management/commands/export_talkgroups.py | radio/management/commands/export_talkgroups.py | import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
| import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
    """Management command that exports talkgroup info to a CSV-style file."""

    # NOTE(review): the previous help text said "Import" although this
    # command (export_talkgroups) writes talkgroups OUT to a file; fixed
    # to match the module's actual purpose.
    help = 'Export talkgroup info'

    def add_arguments(self, parser):
        # Positional argument: path of the file to write.
        parser.add_argument('file')
        parser.add_argument(
            '--system',
            type=int,
            default=-1,  # -1 means "all systems" (no filtering)
            help='Export talkgroups from only this system',
        )

    def handle(self, *args, **options):
        # All option handling lives in the module-level helper.
        export_tg_file(options)
def export_tg_file(options):
    ''' Using the talkgroup file from trunk-recorder'''
    # options: parsed command options; 'file' is the output path and
    # 'system' (when >= 0) restricts the export to a single system.
    file_name = options['file']
    system = options['system']
    talkgroups = TalkGroup.objects.all()
    if system >= 0:
        talkgroups = talkgroups.filter(system=system)
    with open(file_name, "w") as tg_file:
        for t in talkgroups:
            alpha = t.alpha_tag
            description = t.description
            # Zero-padded hexadecimal talkgroup id, at least 3 digits.
            # NOTE(review): the previous `str(hex(t.dec_id))[2:-1]` dropped
            # the final hex digit on Python 3 (the trailing character it
            # stripped was only ever an 'L' for Python 2 longs).
            hex_val = format(t.dec_id, 'x').zfill(3)
            # alpha_tag / description may be None; best-effort trim only.
            try:
                alpha = alpha.rstrip()
            except AttributeError:
                pass
            try:
                description = description.rstrip()
            except AttributeError:
                pass
            tg_file.write("{},{},{},{},{},{}\n".format(
                t.dec_id, hex_val, t.mode, alpha, description, t.priority))
| Python | mit | ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player | import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
Add system support to export talkgroups | import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
| <commit_before>import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
<commit_msg>Add system support to export talkgroups<commit_after> | import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
| import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
Add system support to export talkgroupsimport sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
| <commit_before>import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
<commit_msg>Add system support to export talkgroups<commit_after>import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
|
025bc069e231b58977e7d8ea7c526849f227b9ff | pytest_pipeline/utils.py | pytest_pipeline/utils.py | # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
| # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
| Add utility function for executable checking | Add utility function for executable checking
| Python | bsd-3-clause | bow/pytest-pipeline | # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
Add utility function for executable checking | # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
| <commit_before># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
<commit_msg>Add utility function for executable checking<commit_after> | # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    :param fname: path of the file to hash.
    :param unzip: when ``True``, open with :func:`gzip.open` and hash the
        decompressed contents instead of the raw file.
    :param mode: file mode handed to the opener; use ``"rb"`` for binary.
    :param blocksize: amount of data read per iteration, so large files
        are hashed without loading them entirely into memory.
    """
    open_func = gzip.open if unzip else open
    checksum = hashlib.md5()
    with open_func(fname, mode) as stream:
        chunk = stream.read(blocksize)
        while chunk:
            checksum.update(chunk)
            chunk = stream.read(blocksize)
    return checksum.hexdigest()
def isexecfile(fname):
    """Return True if *fname* is an existing regular file the current
    user may execute."""
    return os.path.isfile(fname) and os.access(fname, os.X_OK)


def which(program):
    """Locate *program* on the search path, like the shell ``which``.

    :param program: bare executable name to look up.
    :returns: the first matching path, or ``None`` when ``PATH`` is
        unset or no directory on it contains an executable *program*.
    """
    # can not do anything meaningful without PATH
    if "PATH" not in os.environ:
        return None
    # Use os.pathsep (':' on POSIX, ';' on Windows) instead of a
    # hard-coded ':' so the lookup also works on Windows.
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(directory, program)
        if isexecfile(candidate):
            return candidate
    return None
| # -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
Add utility function for executable checking# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
| <commit_before># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
<commit_msg>Add utility function for executable checking<commit_after># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
fbc0578a0a359c5bed5317a971f40128ca73429e | python/test/providers.py | python/test/providers.py | # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljöklöjkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
| # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljkljkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
| Fix python data provider test | Fix python data provider test
| Python | apache-2.0 | uboot/stromx,uboot/stromx | # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljöklöjkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
Fix python data provider test | # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljkljkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
| <commit_before># -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljöklöjkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
<commit_msg>Fix python data provider test<commit_after> | # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljkljkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
| # -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljöklöjkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
Fix python data provider test# -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljkljkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
| <commit_before># -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljöklöjkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
<commit_msg>Fix python data provider test<commit_after># -*- coding: utf-8 -*-
import stromx.runtime as sr
stream = sr.Stream()
stream.setName("My stream")
data = "dfdsfdsds\nkljkljkfldsjf"
factory = sr.Factory()
with sr.ZipFileOutput("provider_test.zip") as out:
out.initialize("filename")
out.openFile(".txt", sr.OutputProvider.OpenMode.TEXT)
out.file().write(data)
sr.XmlWriter().writeStream(out, "stream", stream)
with sr.ZipFileInput("provider_test.zip") as inp:
inp.initialize("", "filename.txt")
inp.openFile(sr.InputProvider.OpenMode.TEXT)
data = inp.file().read()
stream = sr.XmlReader().readStream(inp, "stream.xml", factory)
print(stream.name())
print(data)
|
40bc1f50e7b0605522feb4ac86daebb9f785eb88 | test/OLItest/globals.py | test/OLItest/globals.py | #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
| #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
| Use only 1 IMAP connection by default | tests: Use only 1 IMAP connection by default
We don't want to hammmer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de>
| Python | apache-2.0 | frioux/offlineimap,frioux/offlineimap | #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
tests: Use only 1 IMAP connection by default
We don't want to hammmer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de> | #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
| <commit_before>#Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
<commit_msg>tests: Use only 1 IMAP connection by default
We don't want to hammmer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de><commit_after> | #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
| #Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
tests: Use only 1 IMAP connection by default
We don't want to hammmer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de>#Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
| <commit_before>#Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
<commit_msg>tests: Use only 1 IMAP connection by default
We don't want to hammmer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de><commit_after>#Constants, that don't rely on anything else in the module
# Copyright (C) 2012- Sebastian Spaeth & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
|
bc634d8c04bc15ca381019dda08982b9e1badd1c | sncosmo/tests/test_builtins.py | sncosmo/tests/test_builtins.py | import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
| import pytest
import sncosmo
from sncosmo.bandpasses import _BANDPASSES, _BANDPASS_INTERPOLATORS
from sncosmo.magsystems import _MAGSYSTEMS
from sncosmo.models import _SOURCES
bandpasses = [i['name'] for i in _BANDPASSES.get_loaders_metadata()]
bandpass_interpolators = [i['name'] for i in
_BANDPASS_INTERPOLATORS.get_loaders_metadata()]
magsystems = [i['name'] for i in _MAGSYSTEMS.get_loaders_metadata()]
sources = [(i['name'], i['version']) for i in _SOURCES.get_loaders_metadata()]
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpasses)
def test_builtin_bandpass(name):
sncosmo.get_bandpass(name)
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpass_interpolators)
def test_builtin_bandpass_interpolator(name):
interpolator = _BANDPASS_INTERPOLATORS.retrieve(name)
interpolator.at(interpolator.minpos())
@pytest.mark.might_download
@pytest.mark.parametrize("name,version", sources)
def test_builtin_source(name, version):
sncosmo.get_source(name, version)
@pytest.mark.might_download
@pytest.mark.parametrize("name", magsystems)
def test_builtin_magsystem(name):
sncosmo.get_magsystem(name)
| Add tests for all builtins | Add tests for all builtins
| Python | bsd-3-clause | sncosmo/sncosmo,sncosmo/sncosmo,sncosmo/sncosmo | import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
Add tests for all builtins | import pytest
import sncosmo
from sncosmo.bandpasses import _BANDPASSES, _BANDPASS_INTERPOLATORS
from sncosmo.magsystems import _MAGSYSTEMS
from sncosmo.models import _SOURCES
bandpasses = [i['name'] for i in _BANDPASSES.get_loaders_metadata()]
bandpass_interpolators = [i['name'] for i in
_BANDPASS_INTERPOLATORS.get_loaders_metadata()]
magsystems = [i['name'] for i in _MAGSYSTEMS.get_loaders_metadata()]
sources = [(i['name'], i['version']) for i in _SOURCES.get_loaders_metadata()]
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpasses)
def test_builtin_bandpass(name):
sncosmo.get_bandpass(name)
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpass_interpolators)
def test_builtin_bandpass_interpolator(name):
interpolator = _BANDPASS_INTERPOLATORS.retrieve(name)
interpolator.at(interpolator.minpos())
@pytest.mark.might_download
@pytest.mark.parametrize("name,version", sources)
def test_builtin_source(name, version):
sncosmo.get_source(name, version)
@pytest.mark.might_download
@pytest.mark.parametrize("name", magsystems)
def test_builtin_magsystem(name):
sncosmo.get_magsystem(name)
| <commit_before>import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
<commit_msg>Add tests for all builtins<commit_after> | import pytest
import sncosmo
from sncosmo.bandpasses import _BANDPASSES, _BANDPASS_INTERPOLATORS
from sncosmo.magsystems import _MAGSYSTEMS
from sncosmo.models import _SOURCES
bandpasses = [i['name'] for i in _BANDPASSES.get_loaders_metadata()]
bandpass_interpolators = [i['name'] for i in
_BANDPASS_INTERPOLATORS.get_loaders_metadata()]
magsystems = [i['name'] for i in _MAGSYSTEMS.get_loaders_metadata()]
sources = [(i['name'], i['version']) for i in _SOURCES.get_loaders_metadata()]
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpasses)
def test_builtin_bandpass(name):
sncosmo.get_bandpass(name)
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpass_interpolators)
def test_builtin_bandpass_interpolator(name):
interpolator = _BANDPASS_INTERPOLATORS.retrieve(name)
interpolator.at(interpolator.minpos())
@pytest.mark.might_download
@pytest.mark.parametrize("name,version", sources)
def test_builtin_source(name, version):
sncosmo.get_source(name, version)
@pytest.mark.might_download
@pytest.mark.parametrize("name", magsystems)
def test_builtin_magsystem(name):
sncosmo.get_magsystem(name)
| import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
Add tests for all builtinsimport pytest
import sncosmo
from sncosmo.bandpasses import _BANDPASSES, _BANDPASS_INTERPOLATORS
from sncosmo.magsystems import _MAGSYSTEMS
from sncosmo.models import _SOURCES
bandpasses = [i['name'] for i in _BANDPASSES.get_loaders_metadata()]
bandpass_interpolators = [i['name'] for i in
_BANDPASS_INTERPOLATORS.get_loaders_metadata()]
magsystems = [i['name'] for i in _MAGSYSTEMS.get_loaders_metadata()]
sources = [(i['name'], i['version']) for i in _SOURCES.get_loaders_metadata()]
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpasses)
def test_builtin_bandpass(name):
sncosmo.get_bandpass(name)
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpass_interpolators)
def test_builtin_bandpass_interpolator(name):
interpolator = _BANDPASS_INTERPOLATORS.retrieve(name)
interpolator.at(interpolator.minpos())
@pytest.mark.might_download
@pytest.mark.parametrize("name,version", sources)
def test_builtin_source(name, version):
sncosmo.get_source(name, version)
@pytest.mark.might_download
@pytest.mark.parametrize("name", magsystems)
def test_builtin_magsystem(name):
sncosmo.get_magsystem(name)
| <commit_before>import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
<commit_msg>Add tests for all builtins<commit_after>import pytest
import sncosmo
from sncosmo.bandpasses import _BANDPASSES, _BANDPASS_INTERPOLATORS
from sncosmo.magsystems import _MAGSYSTEMS
from sncosmo.models import _SOURCES
bandpasses = [i['name'] for i in _BANDPASSES.get_loaders_metadata()]
bandpass_interpolators = [i['name'] for i in
_BANDPASS_INTERPOLATORS.get_loaders_metadata()]
magsystems = [i['name'] for i in _MAGSYSTEMS.get_loaders_metadata()]
sources = [(i['name'], i['version']) for i in _SOURCES.get_loaders_metadata()]
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpasses)
def test_builtin_bandpass(name):
sncosmo.get_bandpass(name)
@pytest.mark.might_download
@pytest.mark.parametrize("name", bandpass_interpolators)
def test_builtin_bandpass_interpolator(name):
interpolator = _BANDPASS_INTERPOLATORS.retrieve(name)
interpolator.at(interpolator.minpos())
@pytest.mark.might_download
@pytest.mark.parametrize("name,version", sources)
def test_builtin_source(name, version):
sncosmo.get_source(name, version)
@pytest.mark.might_download
@pytest.mark.parametrize("name", magsystems)
def test_builtin_magsystem(name):
sncosmo.get_magsystem(name)
|
d9e65fbf111f8584189a57059516afafb1e4d04c | test/projection_test.py | test/projection_test.py | from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
| from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
| Use TensorFlow's test utilities and add a test for the L1 projection to simplex. | Use TensorFlow's test utilities and add a test for the L1 projection to simplex.
| Python | mit | AmiiThinks/amii-tf-nn | from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
Use TensorFlow's test utilities and add a test for the L1 projection to simplex. | from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
| <commit_before>from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
<commit_msg>Use TensorFlow's test utilities and add a test for the L1 projection to simplex.<commit_after> | from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
| from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
Use TensorFlow's test utilities and add a test for the L1 projection to simplex.from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
| <commit_before>from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
<commit_msg>Use TensorFlow's test utilities and add a test for the L1 projection to simplex.<commit_after>from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
|
d7a91fe283666f01aa06a707c536893cf1473fe3 | rtwilio/models.py | rtwilio/models.py | import datetime
from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
    """Status callback data recorded for a message sent through Twilio."""

    # When the response was recorded; auto-filled on first save (see save()).
    date = models.DateTimeField()
    # Message identifier — one response row per message (primary key).
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)    # account identifier
    sender = models.CharField(max_length=16)     # sending phone number
    recipient = models.CharField(max_length=16)  # receiving phone number
    status = models.CharField(max_length=16)     # delivery status string

    def save(self, **kwargs):
        """Default ``date`` to the current time before saving.

        Uses ``django.utils.timezone.now()`` rather than the naive
        ``datetime.datetime.now()`` so the stored value is timezone-aware
        and correct when Django's ``USE_TZ`` setting is enabled.
        """
        if not self.date:
            self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
| from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
    """Status callback data recorded for a message sent through Twilio."""

    # When the response was recorded; auto-filled on first save (see save()).
    date = models.DateTimeField()
    # Message identifier — one response row per message (primary key).
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)    # account identifier
    sender = models.CharField(max_length=16)     # sending phone number
    recipient = models.CharField(max_length=16)  # receiving phone number
    status = models.CharField(max_length=16)     # delivery status string

    def save(self, **kwargs):
        # Default ``date`` to a timezone-aware "now" so the stored value is
        # correct when Django's USE_TZ setting is enabled.
        if not self.date:
            self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
| Use timezone aware datetime now. | Use timezone aware datetime now.
| Python | bsd-3-clause | caktus/rapidsms-twilio | import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
Use timezone aware datetime now. | from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
| <commit_before>import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
<commit_msg>Use timezone aware datetime now.<commit_after> | from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
| import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
Use timezone aware datetime now.from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
| <commit_before>import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
<commit_msg>Use timezone aware datetime now.<commit_after>from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
|
6340e6f02c3655dc2ab33a67491cc9b16e63e5b0 | redux/__main__.py | redux/__main__.py | from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
| from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
| Make compiler invocation more Makefile-friendly | Make compiler invocation more Makefile-friendly
| Python | mit | Muon/redux | from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
Make compiler invocation more Makefile-friendly | from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
| <commit_before>from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
<commit_msg>Make compiler invocation more Makefile-friendly<commit_after> | from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
| from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
Make compiler invocation more Makefile-friendlyfrom redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
| <commit_before>from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
<commit_msg>Make compiler invocation more Makefile-friendly<commit_after>from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
|
5c214680889a40a2963572e1163b8aa6beeaebc4 | bayespy/nodes/__init__.py | bayespy/nodes/__init__.py | ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
| ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
Take
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
| Add Take node to the node list in API doc | DOC: Add Take node to the node list in API doc
| Python | mit | jluttine/bayespy,bayespy/bayespy | ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
DOC: Add Take node to the node list in API doc | ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
Take
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
| <commit_before>################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
<commit_msg>DOC: Add Take node to the node list in API doc<commit_after> | ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
Take
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
| ################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
DOC: Add Take node to the node list in API doc################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
Take
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
| <commit_before>################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
<commit_msg>DOC: Add Take node to the node list in API doc<commit_after>################################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Package for nodes used to construct the model.
Stochastic nodes
================
.. currentmodule:: bayespy.nodes
Nodes for Gaussian variables:
.. autosummary::
:toctree: generated/
Gaussian
GaussianARD
Nodes for precision and scale variables:
.. autosummary::
:toctree: generated/
Gamma
Wishart
Exponential
Nodes for modelling Gaussian and precision variables jointly (useful as prior
for Gaussian nodes):
.. autosummary::
:toctree: generated/
GaussianGammaISO
GaussianGammaARD
GaussianWishart
Nodes for discrete count variables:
.. autosummary::
:toctree: generated/
Bernoulli
Binomial
Categorical
Multinomial
Poisson
Nodes for probabilities:
.. autosummary::
:toctree: generated/
Beta
Dirichlet
Nodes for dynamic variables:
.. autosummary::
:toctree: generated/
CategoricalMarkovChain
GaussianMarkovChain
SwitchingGaussianMarkovChain
VaryingGaussianMarkovChain
Other stochastic nodes:
.. autosummary::
:toctree: generated/
Mixture
Deterministic nodes
===================
.. autosummary::
:toctree: generated/
Dot
SumMultiply
Add
Gate
Take
"""
# Currently, model construction and the inference network are not separated so
# the model is constructed using variational message passing nodes.
from bayespy.inference.vmp.nodes import *
|
60743b33e5034776576073b151c7a02dc0a40b7e | tests/unit_project/test_fields.py | tests/unit_project/test_fields.py | from djangosanetesting.cases import DatabaseTestCase
from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText
from exampleapp.models import Article
class TestRichTextField(DatabaseTestCase):
    """Behaviour of RichTextField bound to a fresh (empty) Article."""

    def setUp(self):
        super(TestRichTextField, self).setUp()
        # Field bound to an unsaved Article, with markdown as source syntax.
        self.field = RichTextField(
            instance = Article(),
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )

    def test_retrieve_empty_source_for_empty_article(self):
        # A fresh article has an empty source object.
        self.assert_equals(u'', self.field.get_source().content)

    def test_source_available_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source_text())

    def test_render_available_for_empty_article(self):
        # Rendering empty markdown source yields an empty paragraph.
        self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
| from djangosanetesting.cases import UnitTestCase
from djangomarkup.fields import RichTextField
from exampleapp.models import Article
class TestRichTextField(UnitTestCase):
    """Behaviour of RichTextField bound to a fresh (empty) Article."""

    def setUp(self):
        super(TestRichTextField, self).setUp()
        # Field bound to an unsaved Article, with markdown as source syntax.
        self.field = RichTextField(
            instance = Article(),
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )

    def test_retrieve_empty_source_for_empty_article(self):
        # A fresh article has an empty source object.
        self.assert_equals(u'', self.field.get_source().content)

    def test_source_available_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source_text())

    def test_render_available_for_empty_article(self):
        # Rendering empty markdown source yields an empty paragraph.
        self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())

    def test_value_error_raised_when_accessing_source_without_instance(self):
        # Without a bound model instance there is no source to fetch.
        field = RichTextField(
            instance = None,
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )
        self.assert_raises(ValueError, field.get_source)
| Python | bsd-3-clause | ella/django-markup | from djangosanetesting.cases import DatabaseTestCase
from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText
from exampleapp.models import Article
class TestRichTextField(DatabaseTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
Check proper error when accessing source without instance | from djangosanetesting.cases import UnitTestCase
from djangomarkup.fields import RichTextField
from exampleapp.models import Article
class TestRichTextField(UnitTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
def test_value_error_raised_when_accessing_source_without_instance(self):
field = RichTextField(
instance = None,
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
self.assert_raises(ValueError, field.get_source) | <commit_before>from djangosanetesting.cases import DatabaseTestCase
from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText
from exampleapp.models import Article
class TestRichTextField(DatabaseTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
<commit_msg>Check proper error when accessing source without instance<commit_after> | from djangosanetesting.cases import UnitTestCase
from djangomarkup.fields import RichTextField
from exampleapp.models import Article
class TestRichTextField(UnitTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
def test_value_error_raised_when_accessing_source_without_instance(self):
field = RichTextField(
instance = None,
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
self.assert_raises(ValueError, field.get_source) | from djangosanetesting.cases import DatabaseTestCase
from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText
from exampleapp.models import Article
class TestRichTextField(DatabaseTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
Check proper error when accessing source without instancefrom djangosanetesting.cases import UnitTestCase
from djangomarkup.fields import RichTextField
from exampleapp.models import Article
class TestRichTextField(UnitTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
def test_value_error_raised_when_accessing_source_without_instance(self):
field = RichTextField(
instance = None,
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
self.assert_raises(ValueError, field.get_source) | <commit_before>from djangosanetesting.cases import DatabaseTestCase
from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText
from exampleapp.models import Article
class TestRichTextField(DatabaseTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
<commit_msg>Check proper error when accessing source without instance<commit_after>from djangosanetesting.cases import UnitTestCase
from djangomarkup.fields import RichTextField
from exampleapp.models import Article
class TestRichTextField(UnitTestCase):
def setUp(self):
super(TestRichTextField, self).setUp()
self.field = RichTextField(
instance = Article(),
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
def test_retrieve_empty_source_for_empty_article(self):
self.assert_equals(u'', self.field.get_source().content)
def test_source_available_for_empty_article(self):
self.assert_equals(u'', self.field.get_source_text())
def test_render_available_for_empty_article(self):
self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())
def test_value_error_raised_when_accessing_source_without_instance(self):
field = RichTextField(
instance = None,
model = Article,
syntax_processor_name = "markdown",
field_name = "text",
required = True,
label = "Text"
)
self.assert_raises(ValueError, field.get_source) |
4313c5528efd02c45013907300b33436ce31eddd | openacademy/model/openacademy_course.py | openacademy/model/openacademy_course.py | from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
| from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one #api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| Modify copy method into inherit | [REF] openacademy: Modify copy method into inherit
| Python | apache-2.0 | mapuerta/openacademy-proyect | from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
[REF] openacademy: Modify copy method into inherit | from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one #api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| <commit_before>from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
<commit_msg>[REF] openacademy: Modify copy method into inherit<commit_after> | from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one #api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
[REF] openacademy: Modify copy method into inheritfrom openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one #api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| <commit_before>from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
<commit_msg>[REF] openacademy: Modify copy method into inherit<commit_after>from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course'
name = fields.Char(string="Title", required=True)
description = fields.Text(string="Description")
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one #api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
|
440305707dfbf9a7a321b48250245edafc42aa73 | candidates/csv_helpers.py | candidates/csv_helpers.py | from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1])
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
| from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
| Sort on first name after last name | Sort on first name after last name
| Python | agpl-3.0 | mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative | from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1])
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
Sort on first name after last name | from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
| <commit_before>from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1])
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
<commit_msg>Sort on first name after last name<commit_after> | from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
| from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1])
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
Sort on first name after last namefrom __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
| <commit_before>from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1])
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
<commit_msg>Sort on first name after last name<commit_after>from __future__ import unicode_literals
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
not row['election_current'],
row['election_date'],
row['election'],
row['post_label']
)
def _candidate_sort_by_post_key(row):
return (
not row['election_current'],
row['election_date'],
row['election'],
row['post_label'],
row['name'].split()[-1],
row['name'].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
|
217af37f3aa7856770ce30b75df28bcd3582bb79 | geotrek/trekking/tests/test_filters.py | geotrek/trekking/tests/test_filters.py | # -*- coding: utf-8 -*-
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
| # -*- coding: utf-8 -*-
from geotrek.land.filters import *
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def test_land_filters_are_well_setup(self):
filterset = TrekFilterSet()
self.assertIn('work', filterset.filters)
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
| Make sure land filters are setup when testing | Make sure land filters are setup when testing
| Python | bsd-2-clause | GeotrekCE/Geotrek-admin,camillemonchicourt/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,johan--/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek | # -*- coding: utf-8 -*-
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
Make sure land filters are setup when testing | # -*- coding: utf-8 -*-
from geotrek.land.filters import *
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def test_land_filters_are_well_setup(self):
filterset = TrekFilterSet()
self.assertIn('work', filterset.filters)
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
| <commit_before># -*- coding: utf-8 -*-
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
<commit_msg>Make sure land filters are setup when testing<commit_after> | # -*- coding: utf-8 -*-
from geotrek.land.filters import *
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def test_land_filters_are_well_setup(self):
filterset = TrekFilterSet()
self.assertIn('work', filterset.filters)
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
| # -*- coding: utf-8 -*-
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
Make sure land filters are setup when testing# -*- coding: utf-8 -*-
from geotrek.land.filters import *
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def test_land_filters_are_well_setup(self):
filterset = TrekFilterSet()
self.assertIn('work', filterset.filters)
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
| <commit_before># -*- coding: utf-8 -*-
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
<commit_msg>Make sure land filters are setup when testing<commit_after># -*- coding: utf-8 -*-
from geotrek.land.filters import *
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
filterclass = TrekFilterSet
def test_land_filters_are_well_setup(self):
filterset = TrekFilterSet()
self.assertIn('work', filterset.filters)
def create_pair_of_distinct_path(self):
useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
return useless_path, seek_path
|
1ce1998f649cf2449c0898d2b59630d715ab7154 | smallprox/core.py | smallprox/core.py | import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip, _ = re.match('default via (.*?)\s', result).groups(1)
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
| import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip = re.match('default via (.*?)\s', result).groups(1)[0]
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
| Fix getting host ip on linux | Fix getting host ip on linux
| Python | mit | nhumrich/small-prox | import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip, _ = re.match('default via (.*?)\s', result).groups(1)
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
Fix getting host ip on linux | import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip = re.match('default via (.*?)\s', result).groups(1)[0]
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
| <commit_before>import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip, _ = re.match('default via (.*?)\s', result).groups(1)
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
<commit_msg>Fix getting host ip on linux<commit_after> | import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip = re.match('default via (.*?)\s', result).groups(1)[0]
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
| import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip, _ = re.match('default via (.*?)\s', result).groups(1)
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
Fix getting host ip on linuximport asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip = re.match('default via (.*?)\s', result).groups(1)[0]
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
| <commit_before>import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip, _ = re.match('default via (.*?)\s', result).groups(1)
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
<commit_msg>Fix getting host ip on linux<commit_after>import asyncio
import os
import logging
import re
import dns.resolver
logging.basicConfig()
from .server import HTTPServer
from .mapper import update_config, add_container
logger = logging.getLogger('small-prox')
def _get_local_address():
resolver = dns.resolver.Resolver()
try:
resolver.query('docker.for.mac.localhost')
return 'docker.for.mac.localhost'
except:
# must be on linux, get host ip
result = os.popen('ip r').read()
ip = re.match('default via (.*?)\s', result).groups(1)[0]
return ip
def main():
config = {}
if os.getenv('DEBUG') == 'true':
logger.setLevel('DEBUG')
loop = asyncio.get_event_loop()
local_ports = os.getenv('LOCAL_PORTS', [])
local_ports = local_ports and local_ports.split(',')
local_address = _get_local_address()
for port in local_ports:
add_container(None, port, config, ip=local_address)
logger.debug('Current container map: %s', config)
server = HTTPServer(loop, config)
loop.run_until_complete(server.start())
loop.create_task(update_config(config))
loop.run_forever()
|
f531cfa07ba6e6e0d36ba768dbeb4706ae7cd259 | tlslite/utils/pycrypto_rsakey.py | tlslite/utils/pycrypto_rsakey.py | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (n, e) )
else:
self.rsa = RSA.construct( (n, e, d, p, q) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
s = numberToString(m, numBytes(self.n))
c = stringToNumber(self.rsa.decrypt((s,)))
return c
def _rawPublicKeyOp(self, c):
s = numberToString(c, numBytes(self.n))
m = stringToNumber(self.rsa.encrypt(s, None)[0])
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
| # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (long(n), long(e)) )
else:
self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
c = self.rsa.decrypt((m,))
return c
def _rawPublicKeyOp(self, c):
m = self.rsa.encrypt(c, None)[0]
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
| Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working) | Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)
| Python | lgpl-2.1 | ioef/tlslite-ng,ioef/tlslite-ng,ioef/tlslite-ng | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (n, e) )
else:
self.rsa = RSA.construct( (n, e, d, p, q) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
s = numberToString(m, numBytes(self.n))
c = stringToNumber(self.rsa.decrypt((s,)))
return c
def _rawPublicKeyOp(self, c):
s = numberToString(c, numBytes(self.n))
m = stringToNumber(self.rsa.encrypt(s, None)[0])
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working) | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (long(n), long(e)) )
else:
self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
c = self.rsa.decrypt((m,))
return c
def _rawPublicKeyOp(self, c):
m = self.rsa.encrypt(c, None)[0]
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
| <commit_before># Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (n, e) )
else:
self.rsa = RSA.construct( (n, e, d, p, q) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
s = numberToString(m, numBytes(self.n))
c = stringToNumber(self.rsa.decrypt((s,)))
return c
def _rawPublicKeyOp(self, c):
s = numberToString(c, numBytes(self.n))
m = stringToNumber(self.rsa.encrypt(s, None)[0])
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
<commit_msg>Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)<commit_after> | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (long(n), long(e)) )
else:
self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
c = self.rsa.decrypt((m,))
return c
def _rawPublicKeyOp(self, c):
m = self.rsa.encrypt(c, None)[0]
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
| # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (n, e) )
else:
self.rsa = RSA.construct( (n, e, d, p, q) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
s = numberToString(m, numBytes(self.n))
c = stringToNumber(self.rsa.decrypt((s,)))
return c
def _rawPublicKeyOp(self, c):
s = numberToString(c, numBytes(self.n))
m = stringToNumber(self.rsa.encrypt(s, None)[0])
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)# Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (long(n), long(e)) )
else:
self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
c = self.rsa.decrypt((m,))
return c
def _rawPublicKeyOp(self, c):
m = self.rsa.encrypt(c, None)[0]
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
| <commit_before># Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (n, e) )
else:
self.rsa = RSA.construct( (n, e, d, p, q) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
s = numberToString(m, numBytes(self.n))
c = stringToNumber(self.rsa.decrypt((s,)))
return c
def _rawPublicKeyOp(self, c):
s = numberToString(c, numBytes(self.n))
m = stringToNumber(self.rsa.encrypt(s, None)[0])
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
<commit_msg>Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)<commit_after># Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""PyCrypto RSA implementation."""
from cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
if pycryptoLoaded:
from Crypto.PublicKey import RSA
class PyCrypto_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if not d:
self.rsa = RSA.construct( (long(n), long(e)) )
else:
self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )
def __getattr__(self, name):
return getattr(self.rsa, name)
def hasPrivateKey(self):
return self.rsa.has_private()
def _rawPrivateKeyOp(self, m):
c = self.rsa.decrypt((m,))
return c
def _rawPublicKeyOp(self, c):
m = self.rsa.encrypt(c, None)[0]
return m
def generate(bits):
key = PyCrypto_RSAKey()
def f(numBytes):
return bytes(getRandomBytes(numBytes))
key.rsa = RSA.generate(bits, f)
return key
generate = staticmethod(generate)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.