Each row pairs one file's contents before and after a single commit. The `prompt`, `response`, and `text` columns concatenate `old_contents`, `subject`, and `new_contents`; the `*_tagged` variants wrap the same content in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers.

| column | type | min length | max length |
|---|---|---|---|
| commit | string | 40 | 40 |
| old_file | string | 4 | 118 |
| new_file | string | 4 | 118 |
| old_contents | string | 0 | 2.94k |
| new_contents | string | 1 | 4.43k |
| subject | string | 15 | 444 |
| message | string | 16 | 3.45k |
| lang | class (1 value) | - | - |
| license | class (13 values) | - | - |
| repos | string | 5 | 43.2k |
| prompt | string | 17 | 4.58k |
| response | string | 1 | 4.43k |
| prompt_tagged | string | 58 | 4.62k |
| response_tagged | string | 1 | 4.43k |
| text | string | 132 | 7.29k |
| text_tagged | string | 173 | 7.33k |
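A minimal sketch of reading rows with this schema through the Hugging Face `datasets` library; the dataset id below is a placeholder, not the actual repository name:

```python
from datasets import load_dataset

# "user/commit-dataset" is a hypothetical id; substitute the real one.
ds = load_dataset("user/commit-dataset", split="train")

for row in ds.select(range(3)):
    # Each record pairs one file's contents before and after a commit.
    print(row["commit"][:8], row["old_file"], "-", row["subject"])
```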
---

**commit:** 81c567e4be0d3c2f91d3cfa3d04b0b738859da6a
**old_file:** yargy/utils.py
**new_file:** yargy/utils.py
**old_contents:**

```python
from itertools import count, takewhile


def frange(start, stop, step):
    return takewhile(lambda x: x <= stop, (start + i * step for i in count()))


def get_original_text(text, tokens):
    '''
    Returns original text captured by parser grammars
    '''
    if not tokens:
        return None
    head, tail = tokens[0], tokens[-1]
    start, end = head.position[0], tail.position[1]
    return text[start:end]
```
**new_contents:**

```python
from itertools import count, takewhile


def frange(start, stop, step):
    return takewhile(lambda x: x <= stop, (start + i * step for i in count()))


def get_tokens_position(tokens):
    if not tokens:
        return None
    head, tail = tokens[0], tokens[-1]
    return head.position[0], tail.position[1]


def get_original_text(text, tokens):
    '''
    Returns original text captured by parser grammars
    '''
    position = get_tokens_position(tokens)
    if not position:
        return None
    else:
        start, end = position
        return text[start:end]


# stolen from rosettacode
ROMAN_VALUES = (
    ('I', 1),
    ('IV', 4),
    ('V', 5),
    ('IX', 9),
    ('X', 10),
    ('XL', 40),
    ('L', 50),
    ('XC', 90),
    ('C', 100),
    ('CD', 400),
    ('D', 500),
    ('CM', 900),
    ('M', 1000),
)


def decode_roman_number(number):
    total = 0
    for symbol, value in reversed(ROMAN_VALUES):
        while number.startswith(symbol):
            total += value
            number = number[len(symbol):]
    return total
```
**subject:** Add get_tokens_position and decode_roman_number functions
**message:** Add get_tokens_position and decode_roman_number functions
**lang:** Python
**license:** mit
**repos:** bureaucratic-labs/yargy
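A quick sanity check of the helpers this commit adds, with the `new_contents` above in scope; the `Token` namedtuple is a hypothetical stand-in for yargy's token type, assumed only to expose a `(start, end)` tuple as `.position`:

```python
from collections import namedtuple

# Hypothetical token shape; real yargy tokens carry more fields.
Token = namedtuple('Token', ['value', 'position'])

tokens = [Token('Hello', (0, 5)), Token('world', (6, 11))]
assert get_tokens_position(tokens) == (0, 11)
assert get_original_text('Hello world', tokens) == 'Hello world'
assert decode_roman_number('MCMXCIV') == 1994
```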
---

**commit:** d3f09baf1e1de0272e1a579a207f685feb6c673f
**old_file:** common/mixins.py
**new_file:** common/mixins.py
**old_contents:**

```python
from __future__ import unicode_literals

from django.core.exceptions import ValidationError
from django.db import models
from django.utils.text import slugify


class TimestampMixin(models.Model):
    """Mixin for date and timestamp. Inherits django's models.Model."""

    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True


class SlugifyMixin():
    """
    Slugify specific field and pass as value to slug field in the model.

    This mixin helps in solving the problem of having case insensitive
    duplicates by creating a slug and ensuring uniqueness.

    Model field to be slugified should be passed as a string into a
    variable called slugify_field.

    Slug field in the model should be named slug.
    """

    def clean(self):
        if hasattr(self, 'slugify_field') and hasattr(self, 'slug'):
            self.slug = slugify(getattr(self, self.slugify_field))

            if self.__class__.objects.filter(slug=self.slug).exists():
                raise ValidationError("This object already exists.")

    def save(self, *args, **kwargs):
        self.clean()
        return super().save(*args, **kwargs)
```
**new_contents:**

```python
from __future__ import unicode_literals

from django.core.exceptions import ValidationError
from django.db import models
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _


class TimestampMixin(models.Model):
    """Mixin for date and timestamp. Inherits django's models.Model."""

    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True


class SlugifyMixin():
    """
    Slugify specific field and pass as value to slug field in the model.

    This mixin helps in solving the problem of having case insensitive
    duplicates by creating a slug and ensuring uniqueness.

    Model field to be slugified should be passed as a string into a
    variable called slugify_field.

    Slug field in the model should be named slug.
    """

    def clean(self):
        if hasattr(self, 'slugify_field') and hasattr(self, 'slug'):
            slugify_field_value = getattr(self, self.slugify_field)
            self.slug = slugify(slugify_field_value)

            if self.__class__.objects.filter(slug=self.slug).exists():
                raise ValidationError(_("Entry with {0} - {1} already exists.".format(
                    self.slugify_field, slugify_field_value)))

    def save(self, *args, **kwargs):
        self.clean()
        return super().save(*args, **kwargs)
```
**subject:** Return user-friendly error message for SlugifyMixin class
**message:** Return user-friendly error message for SlugifyMixin class
**lang:** Python
**license:** mit
**repos:** teamtaverna/core
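A minimal sketch of a model opting into the mixin, assuming a configured Django project; the `Tag` model, its fields, and the `max_length` values are hypothetical:

```python
from django.db import models

class Tag(SlugifyMixin, TimestampMixin):
    # slugify_field names the source field; the mixin writes the result
    # into `slug` and raises ValidationError on a duplicate.
    slugify_field = 'name'

    name = models.CharField(max_length=50)
    slug = models.SlugField(max_length=50, blank=True)
```

Because `save()` calls `clean()` directly, the duplicate check runs on every save, not only on form validation.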
---

**commit:** a816d0655504051ea12718a0e34bc9645fc92730
**old_file:** personal-site/projects/views.py
**new_file:** personal-site/projects/views.py
**old_contents:**

```python
from django.shortcuts import render
from django.views.generic.detail import DetailView

from projects.models import Project
from misc.code_blocks_preprocessor import CodeBlockExtension

import markdown


class ProjectDetailView(DetailView):
    model = Project
    context_object_name = 'project'
    template_name = 'projects/detail.html'

    def get_context_data(self, **kwargs):
        context = super(ProjectDetailView, self).get_context_data(**kwargs)
        context['projects'] = Project.objects.order_by('priority')
        context['html'] = markdown.markdown(
            context['object'].full_descr,
            extensions=[CodeBlockExtension()])
        return context
```
**new_contents:**

```python
from django.shortcuts import render
from django.views.generic.detail import DetailView

from projects.models import Project
from misc.code_blocks_preprocessor import CodeBlockExtension

import markdown


class ProjectDetailView(DetailView):
    model = Project
    context_object_name = 'project'
    template_name = 'projects/detail.html'

    def get_context_data(self, **kwargs):
        context = super(ProjectDetailView, self).get_context_data(**kwargs)
        context['html'] = markdown.markdown(
            context['object'].full_descr,
            extensions=[CodeBlockExtension()])
        return context
```
**subject:** Remove unnecessary projects added to context
**message:** Remove unnecessary projects added to context
**lang:** Python
**license:** bsd-3-clause
**repos:** brandonw/personal-site,brandonw/personal-site,brandonw/personal-site
---

**commit:** 2d9c4128898c8504813e6ea42eb2d634cf7e56a1
**old_file:** kepakkoconverter.py
**new_file:** kepakkoconverter.py
**old_contents:**

```python
#!/usr/bin/env python3
import PIL
from PIL import Image

LED_COUNT = 60


def resize_image(path):
    img = Image.open(path)
    old_width = img.size[0]
    old_height = img.size[1]
    ratio = 60.0/old_height
    return img.resize((int(old_width*ratio), int(old_height*ratio)),
                      PIL.Image.ANTIALIAS)


def print_matrix(img):
    """
    Arduino matrices can be defined like this:

    byte my2dArray[2][3]={
      {1, 2, 3},
      {2, 3, 4}
    };
    """
    pixels = img.getdata()
    rgb = ["{{{0}, {1}, {2}}}".format(*pixel) for pixel in pixels]
    declaration = "byte pixels[{0}][3] = {{{1}}};".format(len(pixels),
                                                          ",\n".join(rgb))
    print(declaration)

if __name__ == "__main__":
    print_matrix(resize_image("img/grin.png"))
```
**new_contents:**

```python
#!/usr/bin/env python3
import PIL
import sys
from PIL import Image

LED_COUNT = 60


def resize_image(path):
    img = Image.open(path)
    old_width = img.size[0]
    old_height = img.size[1]
    ratio = 60.0/old_height
    return img.resize((int(old_width*ratio), int(old_height*ratio)),
                      PIL.Image.ANTIALIAS)


def reduce_palette(img):
    """
    Reduces palette to 256 colors.
    """
    return img.convert('P', dither=Image.FLOYDSTEINBERG,
                       palette=Image.ADAPTIVE, colors=256)


def get_palette_grouped(img):
    """
    Returns the img palette as list of rgb values.
    """
    palette = img.getpalette()
    return zip(*(iter(palette),) * 3)


def print_matrix(img):
    """
    Arduino matrices can be defined like this:

    byte my2dArray[2][3]={
      {1, 2, 3},
      {2, 3, 4}
    };
    """
    palette = get_palette_grouped(img)
    rgb = ["{{{0}, {1}, {2}}}".format(*color) for color in palette]
    pal_decl = "byte palette[{0}][3] = {{{1}}};".format(len(rgb),
                                                        ",\n".join(rgb))
    pixels = img.getdata()
    pixels_as_str = ",\n".join(str(pixel) for pixel in pixels)
    pixels_decl = "byte pixels[{0}] = {{{1}}};".format(len(pixels),
                                                       pixels_as_str)
    print("\n".join([pal_decl, pixels_decl]))

if __name__ == "__main__":
    image_path = "img/grin.png"
    if len(sys.argv) > 1:
        image_path = sys.argv[1]
    img = resize_image(image_path)
    img_converted = reduce_palette(img)
    print_matrix(img_converted)
```
**subject:** Switch to using palette in the converter
**message:** Switch to using palette in the converter
**lang:** Python
**license:** mit
**repos:** myrjola/Valokepakko,myrjola/Valokepakko,myrjola/Valokepakko
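The grouping idiom in `get_palette_grouped` deserves a note: `zip(*(iter(palette),) * 3)` repeats a single shared iterator three times, so `zip` consumes three consecutive values per output tuple. A standalone check:

```python
flat = [255, 0, 0, 0, 255, 0]           # red, green as a flat palette list
print(list(zip(*(iter(flat),) * 3)))    # [(255, 0, 0), (0, 255, 0)]
```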
---

**commit:** 4c080197dce0d452047203dbf06dd160086fcbdf
**old_file:** website/snat/forms.py
**new_file:** website/snat/forms.py
**old_contents:**

```python
# -*- coding: utf-8 -*-
"""
    website.snat.forms
    ~~~~~~~~~~~~~~~~~~

    vpn forms:
        /sant

    :copyright: (c) 2014 by xiong.xiaox(xiong.xiaox@alibaba-inc.com).
"""
from flask_wtf import Form
from wtforms import TextField
from wtforms.validators import Required, IPAddress


class SnatForm(Form):
    source = TextField(u'SNAT源IP(段)',
                       validators=[Required(message=u'这是一个必选项!')])
    gateway = TextField(u'SNAT转发IP',
                        validators=[Required(message=u'这是一个必选项!'),
                                    IPAddress(message=u'无效的ip 地址!')])
```
**new_contents:**

```python
# -*- coding: utf-8 -*-
"""
    website.snat.forms
    ~~~~~~~~~~~~~~~~~~

    vpn forms:
        /sant

    :copyright: (c) 2014 by xiong.xiaox(xiong.xiaox@alibaba-inc.com).
"""
from flask_wtf import Form
from wtforms import TextField, ValidationError
from wtforms.validators import Required, IPAddress


def IPorNet(message=u"无效的IP 或网段!"):
    def _ipornet(form, field):
        value = field.data
        ip = value.split('/')[0]
        if '/' in value:
            try:
                mask = int(value.split('/')[1])
            except:
                raise ValidationError(message)
            if mask < 0 or mask > 32:
                raise ValidationError(message)
        parts = ip.split('.')
        if len(parts) == 4 and all(x.isdigit() for x in parts):
            numbers = list(int(x) for x in parts)
            if not all(num >= 0 and num < 256 for num in numbers):
                raise ValidationError(message)
            return True
        raise ValidationError(message)
    return _ipornet


class SnatForm(Form):
    source = TextField(u'SNAT源IP(段)',
                       validators=[Required(message=u'这是一个必选项!'),
                                   IPorNet(message=u"无效的IP 或网段!")])
    gateway = TextField(u'SNAT转发IP',
                        validators=[Required(message=u'这是一个必选项!'),
                                    IPAddress(message=u'无效的IP 地址!')])
```
**subject:** Add snat ip or net validator.
**message:** Add snat ip or net validator.
**lang:** Python
**license:** bsd-3-clause
**repos:** sdgdsffdsfff/FlexGW,sdgdsffdsfff/FlexGW,alibaba/FlexGW,alibaba/FlexGW,sdgdsffdsfff/FlexGW,sdgdsffdsfff/FlexGW,alibaba/FlexGW,alibaba/FlexGW
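A small sketch exercising the new validator outside a request cycle, with the `new_contents` above in scope; the `_Field` class is a hypothetical stand-in for the field object WTForms passes in (only `.data` is assumed):

```python
class _Field:
    def __init__(self, data):
        self.data = data

validator = IPorNet()
validator(None, _Field('10.0.0.0/8'))    # valid network: accepted
validator(None, _Field('192.168.1.1'))   # bare IP: accepted
try:
    validator(None, _Field('300.1.1.1/33'))
except ValidationError:
    print('rejected: bad octet and bad mask')
```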
---

**commit:** 6e05ed3d47ab2e98b68ee284ab68cf1b0fc4e2af
**old_file:** www/tests/test_aio.py
**new_file:** www/tests/test_aio.py
**old_contents:**

```python
from browser import console

import asyncio
from async_manager import AsyncTestManager

aio = AsyncTestManager()


async def wait_secs(s, result):
    await asyncio.sleep(s)
    console.log("Returning result", result)
    return result


@aio.async_test(0.5)
def test_simple_coroutine():
    console.log("coro_wait_secs")
    coro_wait_secs = wait_secs(0.1, 10)
    console.log("ensuring future")
    fut = asyncio.ensure_future(coro_wait_secs)
    console.log("asserting")
    assert asyncio.iscoroutine(coro_wait_secs), "Result of running a coroutine function should be a coroutine object"
    assert asyncio.iscoroutinefunction(wait_secs), "asyncio.coroutine decorator should return a coroutine function"
    assert isinstance(fut, asyncio.Future), "ensure_future should return a future"
    console.log("yielding")
    result = yield from fut
    console.log("asserting")
    assert fut.result() == result, "yield from future should return its result"
    assert result == 10, "Future result different from expected"
```
**new_contents:**

```python
from browser import console, aio


async def wait_secs(s, result):
    await aio.sleep(s)
    console.log("Returning result", result)
    return result


async def test_simple_coroutine():
    console.log("coro_wait_secs")
    coro_wait_secs = wait_secs(0.1, 10)
    console.log("ensuring future")
    fut = await coro_wait_secs
    console.log("asserting")
    assert aio.iscoroutine(coro_wait_secs), "Result of running a coroutine function should be a coroutine object"
    assert aio.iscoroutinefunction(wait_secs), "asyncio.coroutine decorator should return a coroutine function"
    console.log("asserts ok")
    assert fut == 10, "Future result different from expected"

aio.run(test_simple_coroutine())
```
**subject:** Replace asyncio tests by browser.aio tests
**message:** Replace asyncio tests by browser.aio tests
**lang:** Python
**license:** bsd-3-clause
**repos:** brython-dev/brython,kikocorreoso/brython,kikocorreoso/brython,brython-dev/brython,kikocorreoso/brython,brython-dev/brython
---

**commit:** 655218d603a836ebae0229394f929b70476f3def
**old_file:** climlab/__init__.py
**new_file:** climlab/__init__.py
**old_contents:**

```python
__version__ = '0.2.12'

# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
#           "long_orbital_table", "insolation", "ebm",
#           "column", "convadj"]
#from climlab import radiation

# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre

# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
```
**new_contents:**

```python
__version__ = '0.2.13'

# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
#           "long_orbital_table", "insolation", "ebm",
#           "column", "convadj"]
#from climlab import radiation

# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre

# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
```
**subject:** Increment version number to 0.2.13
**message:** Increment version number to 0.2.13. Merged Moritz's new process modules.
**lang:** Python
**license:** mit
**repos:** cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab,brian-rose/climlab,cjcardinale/climlab
__version__ = '0.2.12'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
Increment version number to 0.2.13
Merged Moritz's new process modules.
|
__version__ = '0.2.13'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
<commit_before>__version__ = '0.2.12'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
<commit_msg>Increment version number to 0.2.13
Merged Moritz's new process modules.<commit_after>
|
__version__ = '0.2.13'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
__version__ = '0.2.12'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
Increment version number to 0.2.13
Merged Moritz's new process modules.__version__ = '0.2.13'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
<commit_before>__version__ = '0.2.12'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
<commit_msg>Increment version number to 0.2.13
Merged Moritz's new process modules.<commit_after>__version__ = '0.2.13'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
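Editor's note: the __init__ in the record above flattens the package so user code can reach the models and helpers from the top level. A minimal sketch of that usage, assuming a climlab 0.2.x install; integrate_years and the Ts state variable are assumptions drawn from climlab's documented process API, not from this record:
import climlab
print(climlab.__version__)            # '0.2.13' after this commit
ebm = climlab.EBM()                   # re-exported from climlab.model.ebm
ebm.integrate_years(1.0)              # assumed TimeDependentProcess API: step forward one year
print(climlab.global_mean(ebm.Ts))    # global_mean re-exported from climlab.domain.field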
6808758a22e6bb0235038c01366fcbc250e60f84
|
nlppln/commands/freqs.py
|
nlppln/commands/freqs.py
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files
from nlppln.liwc_tokenized import split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files, split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
Use utility function split for splitting text
|
Use utility function split for splitting text
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files
from nlppln.liwc_tokenized import split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
Use utility function split for splitting text
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files, split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
<commit_before>#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files
from nlppln.liwc_tokenized import split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
<commit_msg>Use utility function split for splitting text<commit_after>
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files, split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files
from nlppln.liwc_tokenized import split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
Use utility function split for splitting text#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files, split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
<commit_before>#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files
from nlppln.liwc_tokenized import split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
<commit_msg>Use utility function split for splitting text<commit_after>#!/usr/bin/env python
import os
import click
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from nlppln.utils import create_dirs, get_files, split
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.option('--out_dir', '-o', default=os.getcwd(), type=click.Path())
@click.option('--name', '-n', default='freqs.csv')
def freqs(in_dir, out_dir, name):
out_file = os.path.join(out_dir, name)
create_dirs(out_file)
in_files = get_files(in_dir)
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(in_files)
freqs = np.array(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame(
{'word': vectorizer.get_feature_names(), 'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort('rank')
vocab_df.to_csv(out_file, encoding='utf-8', index=False)
if __name__ == '__main__':
freqs()
|
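Editor's note: the freqs command above counts word occurrences across a corpus with CountVectorizer and ranks them, but two calls have since changed upstream: DataFrame.sort was removed in pandas 0.20 (sort_values replaces it) and CountVectorizer.get_feature_names was removed in scikit-learn 1.2 (get_feature_names_out replaces it). A hedged sketch of the same computation on current libraries; the inline split tokenizer and the file names are illustrative stand-ins:
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
def split(text):
    return text.split()  # stand-in for nlppln.utils.split
vectorizer = CountVectorizer(input='filename', tokenizer=split)
X = vectorizer.fit_transform(['a.txt', 'b.txt'])  # hypothetical corpus files
freqs = np.asarray(X.sum(axis=0)).squeeze()
vocab_df = pd.DataFrame({'word': vectorizer.get_feature_names_out(),
                         'freq': freqs})
vocab_df['rank'] = vocab_df['freq'].rank(method='first', ascending=False)
vocab_df = vocab_df.sort_values('rank')  # DataFrame.sort is gone in modern pandas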
3651d4076899f86f3b6627b0fd7e8af197c5149c
|
bin/pympit_fork.py
|
bin/pympit_fork.py
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
start = MPI.Wtime()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.Barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.Barrier()
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.barrier()
|
Add communicator split to forking test.
|
Add communicator split to forking test.
|
Python
|
bsd-2-clause
|
tskisner/pympit,tskisner/pympit
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
start = MPI.Wtime()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.Barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.Barrier()
Add communicator split to forking test.
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.barrier()
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
start = MPI.Wtime()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.Barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.Barrier()
<commit_msg>Add communicator split to forking test.<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.barrier()
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
start = MPI.Wtime()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.Barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.Barrier()
Add communicator split to forking test.#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.barrier()
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
start = MPI.Wtime()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.Barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.Barrier()
<commit_msg>Add communicator split to forking test.<commit_after>#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import scipy as sc
from astropy.io import fits
import argparse
import subprocess as sp
import pympit as pt
parser = argparse.ArgumentParser(description='Run an MPI test in python with subprocess forking')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
local_out = []
proc = sp.Popen(['pympit_worker.py'], stdout=sp.PIPE, stderr=sp.PIPE)
outs, errs = proc.communicate()
proc.wait()
local_out.append(outs)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
for line in local_out:
print(" {}".format(line.rstrip()))
comm.barrier()
|
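Editor's note: the commit above splits MPI.COMM_WORLD into sub-communicators before forking workers. A self-contained sketch of the Split semantics, assuming mpi4py is installed; launch with something like mpiexec -n 8 python split_demo.py, and note the group size of 4 is an illustrative choice, not taken from the record:
from mpi4py import MPI
comm = MPI.COMM_WORLD
group_size = 4                           # processes per group (illustrative choice)
color = comm.rank // group_size          # ranks sharing a color share a sub-communicator
key = comm.rank % group_size             # key fixes the rank ordering inside each group
sub = comm.Split(color=color, key=key)
print("world rank %d is rank %d of %d in group %d"
      % (comm.rank, sub.rank, sub.size, color))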
d19650235ac95839481d05bd8867afe486fea5c3
|
nosexcover/nosexcover.py
|
nosexcover/nosexcover.py
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile='coverage.xml')
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
parser.add_option('--xcoverage-file', action='store',
default=env.get('NOSE_XCOVER_FILE', 'coverage.xml'),
dest='xcoverage_file',
metavar="FILE",
help='Path to xml file to store the coverage report in. '
'Default is coverage.xml in the working directory. '
'[NOSE_XCOVERAGE_FILE]')
def configure(self, options, config):
super(XCoverage, self).configure(options, config)
self.xcoverageFile = options.xcoverage_file
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile=self.xcoverageFile)
|
Set xml file path to store the coverage report in with --xcoverage-file option
|
Set xml file path to store the coverage report in with --xcoverage-file option
|
Python
|
mit
|
cmheisel/nose-xcover,andresriancho/nose-xcover,alex/nose-xcover
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile='coverage.xml')
Set xml file path to store the coverage report in with --xcoverage-file option
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
parser.add_option('--xcoverage-file', action='store',
default=env.get('NOSE_XCOVER_FILE', 'coverage.xml'),
dest='xcoverage_file',
metavar="FILE",
help='Path to xml file to store the coverage report in. '
'Default is coverage.xml in the working directory. '
'[NOSE_XCOVERAGE_FILE]')
def configure(self, options, config):
super(XCoverage, self).configure(options, config)
self.xcoverageFile = options.xcoverage_file
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile=self.xcoverageFile)
|
<commit_before>"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile='coverage.xml')
<commit_msg>Set xml file path to store the coverage report in with --xcoverage-file option<commit_after>
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
parser.add_option('--xcoverage-file', action='store',
default=env.get('NOSE_XCOVER_FILE', 'coverage.xml'),
dest='xcoverage_file',
metavar="FILE",
help='Path to xml file to store the coverage report in. '
'Default is coverage.xml in the working directory. '
'[NOSE_XCOVERAGE_FILE]')
def configure(self, options, config):
super(XCoverage, self).configure(options, config)
self.xcoverageFile = options.xcoverage_file
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile=self.xcoverageFile)
|
"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile='coverage.xml')
Set xml file path to store the coverage report in with --xcoverage-file option"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
parser.add_option('--xcoverage-file', action='store',
default=env.get('NOSE_XCOVER_FILE', 'coverage.xml'),
dest='xcoverage_file',
metavar="FILE",
help='Path to xml file to store the coverage report in. '
'Default is coverage.xml in the working directory. '
'[NOSE_XCOVERAGE_FILE]')
def configure(self, options, config):
super(XCoverage, self).configure(options, config)
self.xcoverageFile = options.xcoverage_file
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile=self.xcoverageFile)
|
<commit_before>"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile='coverage.xml')
<commit_msg>Set xml file path to store the coverage report in with --xcoverage-file option<commit_after>"""Companion to nose.plugins.cover. Enable by adding --with-xcoverage to your
arguments. A Cobertura-style XML file, honoring the options you pass to
--with-coverage, will be generated in coverage.xml"""
import logging
import sys
from nose.plugins import cover, Plugin
log = logging.getLogger('nose.plugins.xcover')
class XCoverage(cover.Coverage):
"""
Add Cobertura-style XML coverage reports to the built-in nose.plugins.cover plugin.
"""
def options(self, parser, env):
"""
Add options to command line.
"""
Plugin.options(self, parser, env)
parser.add_option('--xcoverage-file', action='store',
default=env.get('NOSE_XCOVER_FILE', 'coverage.xml'),
dest='xcoverage_file',
metavar="FILE",
help='Path to xml file to store the coverage report in. '
'Default is coverage.xml in the working directory. '
'[NOSE_XCOVERAGE_FILE]')
def configure(self, options, config):
super(XCoverage, self).configure(options, config)
self.xcoverageFile = options.xcoverage_file
def report(self, stream):
"""
Output code coverage report.
"""
import coverage
coverage.stop()
modules = [ module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module) ]
log.debug("Coverage report will cover modules: %s", modules)
morfs = [ m.__file__ for m in modules if hasattr(m, '__file__') ]
coverage._the_coverage.xml_report(morfs, outfile=self.xcoverageFile)
|
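Editor's note: two details of the commit above are worth flagging. The help string advertises NOSE_XCOVERAGE_FILE while the default actually reads NOSE_XCOVER_FILE, a mismatch preserved here from the original commit; and coverage._the_coverage is the private module-level interface of coverage.py 2.x/3.x. A hedged sketch of producing the same XML report through the modern object API (coverage >= 4):
import coverage
cov = coverage.Coverage()
cov.start()
# ... import and exercise the code under test ...
cov.stop()
cov.xml_report(outfile='coverage.xml')  # Cobertura-style XML, like the plugin writes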
6f60d2c76ece73e8f37f2ae1014cc26b485495d0
|
numpy/distutils/setup.py
|
numpy/distutils/setup.py
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Make the gfortran/vs2003 hack source file known to distutils.
|
Make the gfortran/vs2003 hack source file known to distutils.
|
Python
|
bsd-3-clause
|
simongibbons/numpy,ContinuumIO/numpy,BabeNovelty/numpy,rherault-insa/numpy,cjermain/numpy,madphysicist/numpy,ahaldane/numpy,BabeNovelty/numpy,abalkin/numpy,naritta/numpy,dimasad/numpy,dch312/numpy,GrimDerp/numpy,MichaelAquilina/numpy,has2k1/numpy,jankoslavic/numpy,sonnyhu/numpy,madphysicist/numpy,ogrisel/numpy,Anwesh43/numpy,dwillmer/numpy,Linkid/numpy,larsmans/numpy,dato-code/numpy,embray/numpy,ajdawson/numpy,njase/numpy,SiccarPoint/numpy,ajdawson/numpy,KaelChen/numpy,MaPePeR/numpy,shoyer/numpy,seberg/numpy,kirillzhuravlev/numpy,embray/numpy,SunghanKim/numpy,mindw/numpy,brandon-rhodes/numpy,ssanderson/numpy,simongibbons/numpy,mattip/numpy,embray/numpy,cowlicks/numpy,skymanaditya1/numpy,b-carter/numpy,dwillmer/numpy,seberg/numpy,MSeifert04/numpy,nguyentu1602/numpy,bmorris3/numpy,andsor/numpy,cjermain/numpy,brandon-rhodes/numpy,madphysicist/numpy,pdebuyl/numpy,ewmoore/numpy,ChanderG/numpy,WillieMaddox/numpy,ddasilva/numpy,ahaldane/numpy,rgommers/numpy,mindw/numpy,behzadnouri/numpy,mingwpy/numpy,Anwesh43/numpy,pelson/numpy,stefanv/numpy,Yusa95/numpy,pyparallel/numpy,NextThought/pypy-numpy,leifdenby/numpy,stuarteberg/numpy,jankoslavic/numpy,pbrod/numpy,dwf/numpy,Srisai85/numpy,sonnyhu/numpy,sinhrks/numpy,pelson/numpy,ahaldane/numpy,chiffa/numpy,dch312/numpy,NextThought/pypy-numpy,mhvk/numpy,ChanderG/numpy,sonnyhu/numpy,moreati/numpy,ewmoore/numpy,matthew-brett/numpy,rmcgibbo/numpy,dato-code/numpy,KaelChen/numpy,musically-ut/numpy,moreati/numpy,GrimDerp/numpy,Eric89GXL/numpy,joferkington/numpy,matthew-brett/numpy,hainm/numpy,mindw/numpy,bmorris3/numpy,jakirkham/numpy,dimasad/numpy,MichaelAquilina/numpy,behzadnouri/numpy,numpy/numpy-refactor,Yusa95/numpy,SiccarPoint/numpy,trankmichael/numpy,dwillmer/numpy,seberg/numpy,bringingheavendown/numpy,mattip/numpy,ekalosak/numpy,utke1/numpy,immerrr/numpy,felipebetancur/numpy,endolith/numpy,utke1/numpy,ewmoore/numpy,grlee77/numpy,simongibbons/numpy,njase/numpy,jorisvandenbossche/numpy,matthew-brett/numpy,stuarteberg/numpy,joferkington/numpy,charris/numpy,grlee77/numpy,ssanderson/numpy,sinhrks/numpy,CMartelLML/numpy,groutr/numpy,GaZ3ll3/numpy,joferkington/numpy,bmorris3/numpy,pizzathief/numpy,chatcannon/numpy,grlee77/numpy,has2k1/numpy,pdebuyl/numpy,tacaswell/numpy,pizzathief/numpy,ContinuumIO/numpy,dwf/numpy,NextThought/pypy-numpy,simongibbons/numpy,jschueller/numpy,rudimeier/numpy,abalkin/numpy,numpy/numpy-refactor,MaPePeR/numpy,ddasilva/numpy,ddasilva/numpy,ChristopherHogan/numpy,rmcgibbo/numpy,bringingheavendown/numpy,rhythmsosad/numpy,dwillmer/numpy,andsor/numpy,ogrisel/numpy,dato-code/numpy,Anwesh43/numpy,kirillzhuravlev/numpy,immerrr/numpy,mindw/numpy,pizzathief/numpy,pyparallel/numpy,stefanv/numpy,njase/numpy,jonathanunderwood/numpy,naritta/numpy,gfyoung/numpy,mwiebe/numpy,felipebetancur/numpy,ESSS/numpy,charris/numpy,numpy/numpy,MSeifert04/numpy,naritta/numpy,Srisai85/numpy,ChristopherHogan/numpy,BMJHayward/numpy,CMartelLML/numpy,rherault-insa/numpy,mhvk/numpy,groutr/numpy,musically-ut/numpy,ChristopherHogan/numpy,seberg/numpy,stefanv/numpy,mathdd/numpy,empeeu/numpy,nbeaver/numpy,charris/numpy,chatcannon/numpy,ViralLeadership/numpy,Yusa95/numpy,ahaldane/numpy,andsor/numpy,mingwpy/numpy,Anwesh43/numpy,CMartelLML/numpy,tynn/numpy,mathdd/numpy,mathdd/numpy,sigma-random/numpy,utke1/numpy,GrimDerp/numpy,rgommers/numpy,stefanv/numpy,SiccarPoint/numpy,nbeaver/numpy,
Linkid/numpy,nguyentu1602/numpy,dwf/numpy,stefanv/numpy,skwbc/numpy,mhvk/numpy,bmorris3/numpy,rudimeier/numpy,pbrod/numpy,mortada/numpy,anntzer/numpy,rmcgibbo/numpy,grlee77/numpy,pizzathief/numpy,jschueller/numpy,kirillzhuravlev/numpy,ekalosak/numpy,tacaswell/numpy,pelson/numpy,rhythmsosad/numpy,musically-ut/numpy,ssanderson/numpy,grlee77/numpy,pelson/numpy,BabeNovelty/numpy,rmcgibbo/numpy,astrofrog/numpy,yiakwy/numpy,cowlicks/numpy,jakirkham/numpy,jorisvandenbossche/numpy,mortada/numpy,tynn/numpy,WillieMaddox/numpy,nbeaver/numpy,skwbc/numpy,Srisai85/numpy,numpy/numpy,kiwifb/numpy,andsor/numpy,ESSS/numpy,mingwpy/numpy,pdebuyl/numpy,githubmlai/numpy,argriffing/numpy,rherault-insa/numpy,GaZ3ll3/numpy,abalkin/numpy,pizzathief/numpy,matthew-brett/numpy,jorisvandenbossche/numpy,ewmoore/numpy,SunghanKim/numpy,astrofrog/numpy,gmcastil/numpy,ewmoore/numpy,rajathkumarmp/numpy,sinhrks/numpy,dato-code/numpy,numpy/numpy-refactor,MaPePeR/numpy,Dapid/numpy,dimasad/numpy,matthew-brett/numpy,endolith/numpy,NextThought/pypy-numpy,mortada/numpy,sigma-random/numpy,kiwifb/numpy,nguyentu1602/numpy,bertrand-l/numpy,b-carter/numpy,rhythmsosad/numpy,ekalosak/numpy,sonnyhu/numpy,shoyer/numpy,musically-ut/numpy,skwbc/numpy,yiakwy/numpy,numpy/numpy-refactor,gfyoung/numpy,sigma-random/numpy,jonathanunderwood/numpy,tacaswell/numpy,joferkington/numpy,hainm/numpy,shoyer/numpy,ViralLeadership/numpy,rajathkumarmp/numpy,ekalosak/numpy,astrofrog/numpy,mwiebe/numpy,BMJHayward/numpy,MSeifert04/numpy,kiwifb/numpy,WarrenWeckesser/numpy,tdsmith/numpy,AustereCuriosity/numpy,larsmans/numpy,skymanaditya1/numpy,pbrod/numpy,ogrisel/numpy,GaZ3ll3/numpy,jonathanunderwood/numpy,nguyentu1602/numpy,chatcannon/numpy,cowlicks/numpy,SunghanKim/numpy,jakirkham/numpy,pbrod/numpy,ajdawson/numpy,mathdd/numpy,rhythmsosad/numpy,solarjoe/numpy,hainm/numpy,MichaelAquilina/numpy,ChanderG/numpy,embray/numpy,mortada/numpy,WarrenWeckesser/numpy,WarrenWeckesser/numpy,chiffa/numpy,jakirkham/numpy,felipebetancur/numpy,CMartelLML/numpy,Dapid/numpy,bringingheavendown/numpy,githubmlai/numpy,jankoslavic/numpy,gfyoung/numpy,embray/numpy,mhvk/numpy,tynn/numpy,skymanaditya1/numpy,ahaldane/numpy,astrofrog/numpy,argriffing/numpy,jankoslavic/numpy,simongibbons/numpy,immerrr/numpy,SiccarPoint/numpy,mingwpy/numpy,has2k1/numpy,AustereCuriosity/numpy,leifdenby/numpy,brandon-rhodes/numpy,numpy/numpy,ViralLeadership/numpy,cjermain/numpy,solarjoe/numpy,githubmlai/numpy,BabeNovelty/numpy,Eric89GXL/numpy,jakirkham/numpy,empeeu/numpy,ogrisel/numpy,numpy/numpy,ajdawson/numpy,ogrisel/numpy,bertrand-l/numpy,WillieMaddox/numpy,KaelChen/numpy,bertrand-l/numpy,Linkid/numpy,kirillzhuravlev/numpy,charris/numpy,rudimeier/numpy,solarjoe/numpy,drasmuss/numpy,ESSS/numpy,Dapid/numpy,dch312/numpy,yiakwy/numpy,cjermain/numpy,trankmichael/numpy,dimasad/numpy,naritta/numpy,dwf/numpy,BMJHayward/numpy,rudimeier/numpy,Srisai85/numpy,stuarteberg/numpy,drasmuss/numpy,groutr/numpy,larsmans/numpy,shoyer/numpy,jorisvandenbossche/numpy,yiakwy/numpy,KaelChen/numpy,numpy/numpy-refactor,MichaelAquilina/numpy,skymanaditya1/numpy,tdsmith/numpy,GrimDerp/numpy,madphysicist/numpy,WarrenWeckesser/numpy,moreati/numpy,madphysicist/numpy,Eric89GXL/numpy,empeeu/numpy,astrofrog/numpy,pyparallel/numpy,jorisvandenbossche/numpy,mhvk/numpy,Linkid/numpy,maniteja123/numpy,cowlicks/numpy,SunghanKim/numpy,WarrenWeckesser/numpy,immerrr/numpy,behzadnouri/numpy,chiffa/numpy,gmcastil/numpy,stuarteberg/numpy,pelson/numpy,shoyer/numpy,anntzer/numpy,ChristopherHogan/numpy,ChanderG/numpy,felipebetancur/numpy,maniteja123/numpy,
AustereCuriosity/numpy,dwf/numpy,rgommers/numpy,larsmans/numpy,jschueller/numpy,pdebuyl/numpy,leifdenby/numpy,sigma-random/numpy,MaPePeR/numpy,mattip/numpy,tdsmith/numpy,MSeifert04/numpy,trankmichael/numpy,rajathkumarmp/numpy,mwiebe/numpy,tdsmith/numpy,ContinuumIO/numpy,Yusa95/numpy,pbrod/numpy,GaZ3ll3/numpy,dch312/numpy,trankmichael/numpy,Eric89GXL/numpy,githubmlai/numpy,anntzer/numpy,anntzer/numpy,endolith/numpy,hainm/numpy,empeeu/numpy,b-carter/numpy,rajathkumarmp/numpy,jschueller/numpy
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
Make the gfortran/vs2003 hack source file known to distutils.
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
<commit_before>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
<commit_msg>Make the gfortran/vs2003 hack source file known to distutils.<commit_after>
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
Make the gfortran/vs2003 hack source file known to distutils.#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
<commit_before>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
<commit_msg>Make the gfortran/vs2003 hack source file known to distutils.<commit_after>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils',parent_package,top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
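Editor's note: add_data_files in the configuration above accepts either bare relative paths, which keep their directory on install, or (install_subdir, files) tuples that redirect them. A short sketch of both forms, assuming the numpy.distutils API (deprecated since NumPy 1.23); the 'compat' subdirectory is purely illustrative:
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils', parent_package='', top_path=None)
config.add_data_files('mingw/gfortran_vs2003_hack.c')                 # installs under distutils/mingw/
config.add_data_files(('compat', ['mingw/gfortran_vs2003_hack.c']))   # redirects into distutils/compat/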
e601172065ca3959c1399608c294243fa2b83cef
|
tests/test_SwitchController.py
|
tests/test_SwitchController.py
|
import unittest
from mpf.system.machine import MachineController
from tests.MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def testIsActiveTimeing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
|
from tests.MpfTestCase import MpfTestCase
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def test_is_active_timing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
def test_initial_state(self):
# tests that when MPF starts, the initial states of switches that
# started in that state are read correctly.
self.assertFalse(self.machine.switch_controller.is_active('s_test',
1000))
|
Add test for initial switch states
|
Add test for initial switch states
|
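Editor's note: the timing assertion the new test above leans on is is_active(name, ms), which asks whether a switch has been continuously active for at least ms milliseconds of mocked time. A hedged standalone sketch in the same MpfTestCase style, assuming an MPF dev checkout with the switch_controller machine config used in this record; the class and test names are illustrative:
from tests.MpfTestCase import MpfTestCase
class TestActiveWindow(MpfTestCase):
    def getConfigFile(self):
        return 'config.yaml'
    def getMachinePath(self):
        return '../tests/machine_files/switch_controller/'
    def test_active_for_at_least(self):
        # Simulate the hardware reporting s_test active, then let mock time pass.
        self.machine.switch_controller.process_switch('s_test', 1, True)
        self.advance_time_and_run(0.5)
        # True only if s_test has been active for >= 300 ms by now.
        self.assertTrue(self.machine.switch_controller.is_active('s_test', ms=300))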
Python
|
mit
|
missionpinball/mpf,missionpinball/mpf
|
import unittest
from mpf.system.machine import MachineController
from tests.MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def testIsActiveTimeing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
Add test for initial switch states
|
from tests.MpfTestCase import MpfTestCase
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def test_is_active_timing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
def test_initial_state(self):
# tests that when MPF starts, the initial states of switches that
# started in that state are read correctly.
self.assertFalse(self.machine.switch_controller.is_active('s_test',
1000))
|
<commit_before>import unittest
from mpf.system.machine import MachineController
from tests.MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def testIsActiveTimeing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
<commit_msg>Add test for initial switch states<commit_after>
|
from tests.MpfTestCase import MpfTestCase
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def test_is_active_timing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
def test_initial_state(self):
# tests that when MPF starts, the initial states of switches that
# started in that state are read correctly.
self.assertFalse(self.machine.switch_controller.is_active('s_test',
1000))
|
import unittest
from mpf.system.machine import MachineController
from tests.MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def testIsActiveTimeing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
Add test for initial switch statesfrom tests.MpfTestCase import MpfTestCase
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def test_is_active_timing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
def test_initial_state(self):
# tests that when MPF starts, the initial states of switches that
# started in that state are read correctly.
self.assertFalse(self.machine.switch_controller.is_active('s_test',
1000))
|
<commit_before>import unittest
from mpf.system.machine import MachineController
from tests.MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def testIsActiveTimeing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
<commit_msg>Add test for initial switch states<commit_after>from tests.MpfTestCase import MpfTestCase
class TestSwitchController(MpfTestCase):
def getConfigFile(self):
return 'config.yaml'
def getMachinePath(self):
return '../tests/machine_files/switch_controller/'
def _callback(self):
self.isActive = self.machine.switch_controller.is_active("s_test", ms=300)
def test_is_active_timing(self):
self.isActive = None
self.machine.switch_controller.add_switch_handler(
switch_name="s_test",
callback=self._callback,
state=1, ms=300)
self.machine.switch_controller.process_switch("s_test", 1, True)
self.advance_time_and_run(3)
self.assertEqual(True, self.isActive)
def test_initial_state(self):
# tests that when MPF starts, the initial states of switches that
# started in that state are read correctly.
self.assertFalse(self.machine.switch_controller.is_active('s_test',
1000))
|
29419cf81068183029b1dc63e718937de155a754
|
test/weakref_test.py
|
test/weakref_test.py
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assert_(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assert_(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assert_(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assertTrue(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assertTrue(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assertTrue(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
Fix one more deprecation warning
|
Fix one more deprecation warning
|
Python
|
lgpl-2.1
|
vapoursynth/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assert_(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assert_(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assert_(ref() is frame)
if __name__ == '__main__':
unittest.main()
Fix one more deprecation warning
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assertTrue(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assertTrue(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assertTrue(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assert_(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assert_(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assert_(ref() is frame)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix one more deprecation warning<commit_after>
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assertTrue(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assertTrue(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assertTrue(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assert_(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assert_(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assert_(ref() is frame)
if __name__ == '__main__':
unittest.main()
Fix one more deprecation warningimport weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assertTrue(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assertTrue(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assertTrue(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assert_(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assert_(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assert_(ref() is frame)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix one more deprecation warning<commit_after>import weakref
import unittest
import vapoursynth as vs
class FilterTestSequence(unittest.TestCase):
def setUp(self):
self.core = vs.get_core()
def test_weakref_core(self):
ref = weakref.ref(self.core)
self.assertTrue(ref() is self.core)
def test_weakref_node(self):
video = self.core.std.BlankClip()
ref = weakref.ref(video)
self.assertTrue(ref() is video)
def test_weakref_frame(self):
video = self.core.std.BlankClip()
frame = video.get_frame(0)
ref = weakref.ref(frame)
self.assertTrue(ref() is frame)
if __name__ == '__main__':
unittest.main()
|
1964407097b15c92e9b3aa77dc3d6d94bb656757
|
turbustat/tests/test_dendro.py
|
turbustat/tests/test_dendro.py
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
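The new save/load assertions boil down to a pickle round-trip plus cleanup. A generic sketch of that test pattern, with a stand-in class rather than Dendrogram_Stats and a tempfile instead of a fixed filename, might look like this:
import os
import pickle
import tempfile
import numpy.testing as npt
class Stats:
    def __init__(self, values):
        self.values = values
    def save_results(self, path):
        with open(path, 'wb') as f:
            pickle.dump(self.values, f)
    def load_results(self, path):
        with open(path, 'rb') as f:
            self.values = pickle.load(f)
original = Stats([1.0, 2.5, 4.0])
fd, path = tempfile.mkstemp(suffix='.pkl')
os.close(fd)
try:
    original.save_results(path)
    restored = Stats(None)
    restored.load_results(path)
    npt.assert_allclose(restored.values, [1.0, 2.5, 4.0])
finally:
    os.remove(path)                      # clean up, mirroring the os.remove in the test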
|
Add testing of loading and saving for Dendrogram_Stats
|
Add testing of loading and saving for Dendrogram_Stats
|
Python
|
mit
|
Astroua/TurbuStat,e-koch/TurbuStat
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
Add testing of loading and saving for Dendrogram_Stats
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
<commit_before># Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
<commit_msg>Add testing of loading and saving for Dendrogram_Stats<commit_after>
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
Add testing of loading and saving for Dendrogram_Stats# Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
<commit_before># Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
<commit_msg>Add testing of loading and saving for Dendrogram_Stats<commit_after># Licensed under an MIT open source license - see LICENSE
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
8b09bc6854075f43bf408169a743d023f60fbe0b
|
telemetry/telemetry/page/actions/navigate.py
|
telemetry/telemetry/page/actions/navigate.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
if hasattr(self, 'timeout_seconds') and self.timeout_seconds:
tab.Navigate(target_side_url,
page.script_to_evaluate_on_commit,
self.timeout_seconds)
else:
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
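The hasattr branch above is a common way to forward an optional attribute only when it was configured. A hedged stand-alone sketch of the same dispatch, using getattr with a default and a fake navigate function (all names invented):
class Action:
    def run(self, navigate):
        timeout = getattr(self, 'timeout_seconds', None)
        if timeout:
            navigate('http://example.com', timeout=timeout)
        else:
            navigate('http://example.com')
calls = []
def fake_navigate(url, timeout=60):
    calls.append((url, timeout))
a = Action()
a.run(fake_navigate)                     # no attribute set -> default timeout path
a.timeout_seconds = 5
a.run(fake_navigate)                     # attribute set -> explicit timeout forwarded
assert calls == [('http://example.com', 60), ('http://example.com', 5)]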
|
Add a timeout attr to NavigateAction.
|
Add a timeout attr to NavigateAction.
BUG=320748
Review URL: https://codereview.chromium.org/202483006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@257922 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
catapult-project/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,sahiljain/catapult,benschmaus/catapult
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
Add a timeout attr to NavigateAction.
BUG=320748
Review URL: https://codereview.chromium.org/202483006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@257922 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
if hasattr(self, 'timeout_seconds') and self.timeout_seconds:
tab.Navigate(target_side_url,
page.script_to_evaluate_on_commit,
self.timeout_seconds)
else:
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
<commit_msg>Add a timeout attr to NavigateAction.
BUG=320748
Review URL: https://codereview.chromium.org/202483006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@257922 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
if hasattr(self, 'timeout_seconds') and self.timeout_seconds:
tab.Navigate(target_side_url,
page.script_to_evaluate_on_commit,
self.timeout_seconds)
else:
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
Add a timeout attr to NavigateAction.
BUG=320748
Review URL: https://codereview.chromium.org/202483006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@257922 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
if hasattr(self, 'timeout_seconds') and self.timeout_seconds:
tab.Navigate(target_side_url,
page.script_to_evaluate_on_commit,
self.timeout_seconds)
else:
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
<commit_msg>Add a timeout attr to NavigateAction.
BUG=320748
Review URL: https://codereview.chromium.org/202483006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@257922 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
class NavigateAction(page_action.PageAction):
def __init__(self, attributes=None):
super(NavigateAction, self).__init__(attributes)
def RunAction(self, page, tab):
if page.is_file:
target_side_url = tab.browser.http_server.UrlOf(page.file_path_url)
else:
target_side_url = page.url
if hasattr(self, 'timeout_seconds') and self.timeout_seconds:
tab.Navigate(target_side_url,
page.script_to_evaluate_on_commit,
self.timeout_seconds)
else:
tab.Navigate(target_side_url, page.script_to_evaluate_on_commit)
tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
|
12d525b79e78d8e183d75a2b81221f7d18519897
|
tests/kernel_test.py
|
tests/kernel_test.py
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
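The fix replaces filesystem assertions (glob a JSON file, parse it, delete it) with an injected in-memory Result, which keeps the test hermetic. A minimal sketch of that injection pattern, with entirely invented stand-ins for desefu's classes:
class Result:
    def __init__(self):
        self.result = []
    def add(self, entry):
        self.result.append(entry)
class Kernel:
    result = None                        # injected by the test, as in the fixed version
    @classmethod
    def exec_search(cls, chain_id, files):
        cls.result.add({'module_chain_id': chain_id, 'files_count': len(files)})
Kernel.result = Result()
Kernel.exec_search('abcetc', ['a.db'])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert data['files_count'] == 1          # no glob/json/os.remove needed any more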
|
Fix tests related to result collection
|
Fix tests related to result collection
|
Python
|
mit
|
vdjagilev/desefu
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
Fix tests related to result collection
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
<commit_before>from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
<commit_msg>Fix tests related to result collection<commit_after>
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
Fix tests related to result collectionfrom kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
<commit_before>from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
<commit_msg>Fix tests related to result collection<commit_after>from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
98de0f94332cd2a0faedd1c72d2ee4092552fdb0
|
tests/unit/helper.py
|
tests/unit/helper.py
|
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
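The bug being fixed is subtle: a module-level create_autospec of a class yields one mock class whose every call returns the same return_value, so recorded calls persist across tests. The stand-in sketch below (not github3.py's session class) demonstrates the leak and the per-test fix:
from unittest import mock                # the 'mock' backport behaves the same way
class Session(object):
    def get(self, url):
        pass
MockedSession = mock.create_autospec(Session)  # module level, created once
s1 = MockedSession()
s2 = MockedSession()
assert s1 is s2                          # same return_value object: call history is shared
s1.get('first')
assert s2.get.call_count == 1            # a 'different' session already saw the call
fresh1 = mock.create_autospec(Session)()
fresh2 = mock.create_autospec(Session)()
assert fresh1 is not fresh2              # rebuilding the autospec per test isolates state
assert fresh1.get.call_count == 0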
|
Fix the issue where the mock is persisting calls
|
Fix the issue where the mock is persisting calls
|
Python
|
bsd-3-clause
|
jim-minter/github3.py,wbrefvem/github3.py,agamdua/github3.py,h4ck3rm1k3/github3.py,krxsky/github3.py,balloob/github3.py,ueg1990/github3.py,sigmavirus24/github3.py,icio/github3.py,christophelec/github3.py,itsmemattchung/github3.py,degustaf/github3.py
|
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
Fix the issue where the mock is persisting calls
|
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
<commit_before>import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
<commit_msg>Fix the issue where the mock is persisting calls<commit_after>
|
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
Fix the issue where the mock is persisting callsimport mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
<commit_before>import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
<commit_msg>Fix the issue where the mock is persisting calls<commit_after>import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
64c04167b800c6e90c8473c2d89896fb2bfa3bc7
|
nashvegas/models.py
|
nashvegas/models.py
|
from datetime import datetime
from django.db import models
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=datetime.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
from django.db import models
from django.utils import timezone
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=timezone.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
Fix timezone support for migrations
|
Fix timezone support for migrations
|
Python
|
mit
|
paltman-archive/nashvegas,paltman/nashvegas,dcramer/nashvegas,jonathanchu/nashvegas,iivvoo/nashvegas
|
from datetime import datetime
from django.db import models
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=datetime.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
Fix timezone support for migrations
|
from django.db import models
from django.utils import timezone
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=timezone.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
<commit_before>from datetime import datetime
from django.db import models
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=datetime.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
<commit_msg>Fix timezone support for migrations<commit_after>
|
from django.db import models
from django.utils import timezone
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=timezone.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
from datetime import datetime
from django.db import models
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=datetime.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
Fix timezone support for migrationsfrom django.db import models
from django.utils import timezone
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=timezone.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
<commit_before>from datetime import datetime
from django.db import models
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=datetime.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
<commit_msg>Fix timezone support for migrations<commit_after>from django.db import models
from django.utils import timezone
class Migration(models.Model):
migration_label = models.CharField(max_length=200)
date_created = models.DateTimeField(default=timezone.now)
content = models.TextField()
scm_version = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return unicode("%s [%s]" % (self.migration_label, self.scm_version))
|
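The switch from datetime.now to timezone.now matters once Django's USE_TZ setting is enabled: datetime.now() returns a naive timestamp, while timezone.now() returns an aware one, and the two cannot be compared. Note also that the callable itself is passed as the field default, so it is evaluated when each row is saved rather than at import time. A stdlib-only sketch of the naive/aware distinction:

from datetime import datetime, timezone

naive = datetime.now()               # no tzinfo attached
aware = datetime.now(timezone.utc)   # carries an offset, like timezone.now()

assert naive.tzinfo is None
assert aware.tzinfo is not None
try:
    naive < aware                    # mixing the two raises
except TypeError as exc:
    print('naive/aware comparison failed:', exc)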
ff17f0ef71ccc2e553b19d67eac13ec74021f0a5
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.13.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.13.3
|
Increment version number to 0.13.3
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.13.3
|
"""Configuration for Django system."""
__version__ = "0.13.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.13.3<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.13.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.13.3"""Configuration for Django system."""
__version__ = "0.13.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.13.3<commit_after>"""Configuration for Django system."""
__version__ = "0.13.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
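The __version_info__ recipe in this record turns the version string into a tuple, converting numeric fields to ints and leaving any pre-release tag as a string; the single replace('-', '.', 1) folds a dash-separated suffix into the split. Run standalone (the '-dev' input is illustrative, not from the repository):

for version in ('0.13.3', '0.13.3-dev.2'):
    info = tuple(
        int(num) if num.isdigit() else num
        for num in version.replace('-', '.', 1).split('.')
    )
    print(version, '->', info)
# 0.13.3 -> (0, 13, 3)
# 0.13.3-dev.2 -> (0, 13, 3, 'dev', 2)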
8b73f0e4e70fa1ac6705a4c44878f4910beb8cfb
|
tests/scratchtest2.py
|
tests/scratchtest2.py
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
dict = lexer.get_dictionary(lexer._dict_addr)
print dict
print
print "dictionary has", len(dict.keys()), "items"
print lexer._dict
def lex_split(str, separators):
split_str = []
prev_i = 0
i = 0
while i < len(str):
if str[i] in separators:
split_str.append(str[prev_i:i])
split_str.append(str[i])
prev_i = i+1
i = i+1
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
print lexer._dict
|
Revert r67, which was not the changeset intended for commit.
|
Revert r67, which was not the changeset intended for commit.
|
Python
|
bsd-3-clause
|
sussman/zvm,sussman/zvm
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
dict = lexer.get_dictionary(lexer._dict_addr)
print dict
print
print "dictionary has", len(dict.keys()), "items"
print lexer._dict
def lex_split(str, separators):
split_str = []
prev_i = 0
i = 0
while i < len(str):
if str[i] in separators:
split_str.append(str[prev_i:i])
split_str.append(str[i])
prev_i = i+1
i = i+1
Revert r67, which was not the changeset intended for commit.
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
print lexer._dict
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
dict = lexer.get_dictionary(lexer._dict_addr)
print dict
print
print "dictionary has", len(dict.keys()), "items"
print lexer._dict
def lex_split(str, separators):
split_str = []
prev_i = 0
i = 0
while i < len(str):
if str[i] in separators:
split_str.append(str[prev_i:i])
split_str.append(str[i])
prev_i = i+1
i = i+1
<commit_msg>Revert r67, which was not the changeset intended for commit.<commit_after>
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
print lexer._dict
|
#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
dict = lexer.get_dictionary(lexer._dict_addr)
print dict
print
print "dictionary has", len(dict.keys()), "items"
print lexer._dict
def lex_split(str, separators):
split_str = []
prev_i = 0
i = 0
while i < len(str):
if str[i] in separators:
split_str.append(str[prev_i:i])
split_str.append(str[i])
prev_i = i+1
i = i+1
Revert r67, which was not the changeset intended for commit.#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
print lexer._dict
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
dict = lexer.get_dictionary(lexer._dict_addr)
print dict
print
print "dictionary has", len(dict.keys()), "items"
print lexer._dict
def lex_split(str, separators):
split_str = []
prev_i = 0
i = 0
while i < len(str):
if str[i] in separators:
split_str.append(str[prev_i:i])
split_str.append(str[i])
prev_i = i+1
i = i+1
<commit_msg>Revert r67, which was not the changeset intended for commit.<commit_after>#!/usr/bin/env python
import sys
sys.path.append("../zvm")
from zmemory import ZMemory
from zlexer import ZLexer
story = file("../stories/zork.z1").read()
mem = ZMemory(story)
lexer = ZLexer(mem)
print "This story is z version", mem.version
print "Standard dictionary:"
print " word separators are", lexer._separators
print " each dict value is", lexer.get_dictionary_entry_length(lexer._dict_addr), "bytes long"
print " there are", lexer.get_dictionary_num_entries(lexer._dict_addr), "entries in the dictionary"
print lexer._dict
|
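The reverted scratch code had left lex_split unfinished: it never appends the trailing word and never returns. For reference, a completed version might look like the sketch below; this is a guess at the intent, not the project's actual implementation:

def lex_split(text, separators):
    parts = []
    prev = 0
    for i, ch in enumerate(text):
        if ch in separators:
            parts.append(text[prev:i])  # word before the separator
            parts.append(ch)            # keep the separator as its own token
            prev = i + 1
    parts.append(text[prev:])           # trailing word
    return [p for p in parts if p]      # drop empty pieces

print(lex_split('open mailbox, then read', ','))
# ['open mailbox', ',', ' then read']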
c2a5a62e14780a90e7b0dab5a570d1e02d6e9030
|
api/ud_helper.py
|
api/ud_helper.py
|
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
Improve parsing of short strings by adding period.
|
Improve parsing of short strings by adding period.
Former-commit-id: 812679f50e3dc89a10b1bc7c70061d2e6087c041
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
Improve parsing of short strings by adding period.
Former-commit-id: 812679f50e3dc89a10b1bc7c70061d2e6087c041
|
import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
<commit_before>from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
<commit_msg>Improve parsing of short strings by adding period.
Former-commit-id: 812679f50e3dc89a10b1bc7c70061d2e6087c041<commit_after>
|
import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
Improve parsing of short strings by adding period.
Former-commit-id: 812679f50e3dc89a10b1bc7c70061d2e6087c041import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
<commit_before>from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
<commit_msg>Improve parsing of short strings by adding period.
Former-commit-id: 812679f50e3dc89a10b1bc7c70061d2e6087c041<commit_after>import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
|
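The fix appends a sentence terminator so the tokenizer treats short fragments as complete sentences: if the last character matches \w, a period is added. The check in isolation, with an extra empty-string guard that the record's version lacks (its text.strip()[-1] would raise IndexError on empty input):

import re

def ensure_terminator(text):
    text = text.strip()
    if text and re.match(r'\w', text[-1], flags=re.UNICODE):
        return text + '.'
    return text

print(ensure_terminator('hej världen'))    # 'hej världen.'
print(ensure_terminator('hej världen!'))   # 'hej världen!'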
78977a0f976615e76db477b0ab7b35193b34d189
|
api/__init__.py
|
api/__init__.py
|
from flask import Flask
app = Flask(__name__)
app.secret_key = ''
import api.userview
|
from flask import Flask
from simplekv.memory import DictStore
from flaskext.kvsession import KVSessionExtension
# Use DictStore until the code is ready for production
store = DictStore()
app = Flask(__name__)
app.secret_key = ''
KVSessionExtension(store, app)
import api.userview
|
Change so that kvsession (server side sessions) is used instead of flask default
|
Change so that kvsession (server side sessions) is used instead of flask default
|
Python
|
isc
|
tobbez/lys-reader
|
from flask import Flask
app = Flask(__name__)
app.secret_key = ''
import api.userview
Change so that kvsession (server side sessions) is used instead of flask default
|
from flask import Flask
from simplekv.memory import DictStore
from flaskext.kvsession import KVSessionExtension
# Use DictStore until the code is ready for production
store = DictStore()
app = Flask(__name__)
app.secret_key = ''
KVSessionExtension(store, app)
import api.userview
|
<commit_before>from flask import Flask
app = Flask(__name__)
app.secret_key = ''
import api.userview
<commit_msg>Change so that kvsession (server side sessions) is used instead of flask default<commit_after>
|
from flask import Flask
from simplekv.memory import DictStore
from flaskext.kvsession import KVSessionExtension
# Use DictStore until the code is ready for production
store = DictStore()
app = Flask(__name__)
app.secret_key = ''
KVSessionExtension(store, app)
import api.userview
|
from flask import Flask
app = Flask(__name__)
app.secret_key = ''
import api.userview
Change so that kvsession (server side sessions) is used instead of flask defaultfrom flask import Flask
from simplekv.memory import DictStore
from flaskext.kvsession import KVSessionExtension
# Use DictStore until the code is ready for production
store = DictStore()
app = Flask(__name__)
app.secret_key = ''
KVSessionExtension(store, app)
import api.userview
|
<commit_before>from flask import Flask
app = Flask(__name__)
app.secret_key = ''
import api.userview
<commit_msg>Change so that kvsession (server side sessions) is used instead of flask default<commit_after>from flask import Flask
from simplekv.memory import DictStore
from flaskext.kvsession import KVSessionExtension
# Use DictStore until the code is ready for production
store = DictStore()
app = Flask(__name__)
app.secret_key = ''
KVSessionExtension(store, app)
import api.userview
|
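Flask-KVSession replaces Flask's signed client-side cookie with a server-side store, so the cookie only carries a session ID. DictStore keeps everything in process memory, which is why the record flags it as pre-production; a persistent backend would be swapped in later. A hedged sketch with Redis (import paths as in the simplekv and newer Flask-KVSession documentation; the record itself uses the older flaskext.kvsession namespace, so verify against the installed versions):

from flask import Flask
from redis import StrictRedis
from simplekv.memory.redisstore import RedisStore
from flask_kvsession import KVSessionExtension

store = RedisStore(StrictRedis(host='localhost', port=6379))

app = Flask(__name__)
app.secret_key = 'change-me'      # still required to sign the session ID
KVSessionExtension(store, app)    # session payloads now live in Redis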
0518025b568d219b2de5f19df38c03bf29cd98db
|
api/database.py
|
api/database.py
|
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
Session = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.Session = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
def add(self, cr: ConvertRequest):
try:
session = Database.Session()
session.add(cr)
session.commit()
return True
except:
return False
def retrieve(self, id):
session = Database.Session()
return session.query(ConvertRequest).filter_by(id=id).first()
def update(self):
# TODO: Update status and result of conversion jobs.
pass
|
import os
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
session_maker = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.session_maker = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
self.session = None
def add(self, cr: ConvertRequest):
try:
self.session.add(cr)
self.session.commit()
return True
except:
return False
def retrieve(self, id):
req = self.session.query(ConvertRequest).filter_by(id=id).first()
return req
def update(self):
# TODO: Update status and result of conversion jobs.
self.session.commit()
pass
def renew(self):
self.session = Database.session_maker()
def finalize(self):
self.session.commit()
self.session.close()
|
Restructure session handling in sqlalchemy.
|
Restructure session handling in sqlalchemy.
|
Python
|
mit
|
SBRG/EscherConverter,SBRG/EscherConverter,SBRG/EscherConverter,SBRG/EscherConverter,SBRG/EscherConverter
|
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
Session = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.Session = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
def add(self, cr: ConvertRequest):
try:
session = Database.Session()
session.add(cr)
session.commit()
return True
except:
return False
def retrieve(self, id):
session = Database.Session()
return session.query(ConvertRequest).filter_by(id=id).first()
def update(self):
# TODO: Update status and result of conversion jobs.
pass
Restructure session handling in sqlalchemy.
|
import os
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
session_maker = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.session_maker = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
self.session = None
def add(self, cr: ConvertRequest):
try:
self.session.add(cr)
self.session.commit()
return True
except:
return False
def retrieve(self, id):
req = self.session.query(ConvertRequest).filter_by(id=id).first()
return req
def update(self):
# TODO: Update status and result of conversion jobs.
self.session.commit()
pass
def renew(self):
self.session = Database.session_maker()
def finalize(self):
self.session.commit()
self.session.close()
|
<commit_before>import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
Session = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.Session = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
def add(self, cr: ConvertRequest):
try:
session = Database.Session()
session.add(cr)
session.commit()
return True
except:
return False
def retrieve(self, id):
session = Database.Session()
return session.query(ConvertRequest).filter_by(id=id).first()
def update(self):
# TODO: Update status and result of conversion jobs.
pass
<commit_msg>Restructure session handling in sqlalchemy.<commit_after>
|
import os
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
session_maker = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.session_maker = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
self.session = None
def add(self, cr: ConvertRequest):
try:
self.session.add(cr)
self.session.commit()
return True
except:
return False
def retrieve(self, id):
req = self.session.query(ConvertRequest).filter_by(id=id).first()
return req
def update(self):
# TODO: Update status and result of conversion jobs.
self.session.commit()
pass
def renew(self):
self.session = Database.session_maker()
def finalize(self):
self.session.commit()
self.session.close()
|
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
Session = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.Session = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
def add(self, cr: ConvertRequest):
try:
session = Database.Session()
session.add(cr)
session.commit()
return True
except:
return False
def retrieve(self, id):
session = Database.Session()
return session.query(ConvertRequest).filter_by(id=id).first()
def update(self):
# TODO: Update status and result of conversion jobs.
pass
Restructure session handling in sqlalchemy.import os
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
session_maker = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.session_maker = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
self.session = None
def add(self, cr: ConvertRequest):
try:
self.session.add(cr)
self.session.commit()
return True
except:
return False
def retrieve(self, id):
req = self.session.query(ConvertRequest).filter_by(id=id).first()
return req
def update(self):
# TODO: Update status and result of conversion jobs.
self.session.commit()
pass
def renew(self):
self.session = Database.session_maker()
def finalize(self):
self.session.commit()
self.session.close()
|
<commit_before>import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
Session = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.Session = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
def add(self, cr: ConvertRequest):
try:
session = Database.Session()
session.add(cr)
session.commit()
return True
except:
return False
def retrieve(self, id):
session = Database.Session()
return session.query(ConvertRequest).filter_by(id=id).first()
def update(self):
# TODO: Update status and result of conversion jobs.
pass
<commit_msg>Restructure session handling in sqlalchemy.<commit_after>import os
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import ConvertRequest, Base
class Database(object):
db_connection_string = None
engine = None
session_maker = None
def __init__(self, path, echo):
if not os.path.exists(os.path.dirname(path)):
raise Exception("Path doesn't exists.")
Database.db_connection_string = "sqlite:///" + path
Database.engine = create_engine(Database.db_connection_string, echo=echo)
Database.session_maker = sessionmaker(bind=Database.engine)
Base.metadata.create_all(Database.engine)
self.session = None
def add(self, cr: ConvertRequest):
try:
self.session.add(cr)
self.session.commit()
return True
except:
return False
def retrieve(self, id):
req = self.session.query(ConvertRequest).filter_by(id=id).first()
return req
def update(self):
# TODO: Update status and result of conversion jobs.
self.session.commit()
pass
def renew(self):
self.session = Database.session_maker()
def finalize(self):
self.session.commit()
self.session.close()
|
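The restructuring separates one-time setup (engine and sessionmaker) from session lifetime: one session per unit of work, opened by renew() and committed/closed by finalize(), instead of a throwaway Session() inside each method. A self-contained sketch of the same pattern with a stand-in model (SQLAlchemy 1.4+ import paths assumed):

from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import sessionmaker, declarative_base

Base = declarative_base()

class Job(Base):
    __tablename__ = 'jobs'
    id = Column(Integer, primary_key=True)
    status = Column(String, default='queued')

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

session = Session()                 # the renew() step
try:
    session.add(Job(id=1))          # add/retrieve share this session
    session.commit()
    print(session.query(Job).filter_by(id=1).first().status)  # queued
finally:
    session.close()                 # the finalize() step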
2056fff6f93d07c3c257748ff82a93a4383da9f5
|
src/ansible/tests/test_views.py
|
src/ansible/tests/test_views.py
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_playbook_list_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_playbook_detail_url_accessible_by_name(self):
resp = self.client.get(reverse(
'ansible:playbook-detail', kwargs={'pk':1}))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
Add test for detailed view
|
Add test for detailed view
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
Add test for detailed view
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_playbook_list_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_playbook_detail_url_accessible_by_name(self):
resp = self.client.get(reverse(
'ansible:playbook-detail', kwargs={'pk':1}))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
<commit_before>from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
<commit_msg>Add test for detailed view<commit_after>
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_playbook_list_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_playbook_detail_url_accessible_by_name(self):
resp = self.client.get(reverse(
'ansible:playbook-detail', kwargs={'pk':1}))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
Add test for detailed viewfrom django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_playbook_list_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_playbook_detail_url_accessible_by_name(self):
resp = self.client.get(reverse(
'ansible:playbook-detail', kwargs={'pk':1}))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
<commit_before>from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
<commit_msg>Add test for detailed view<commit_after>from django.test import TestCase
from ansible.models import Playbook
from django.core.urlresolvers import reverse
class PlaybookListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
Playbook.query_set.create(username='lozadaomr',repository='ansi-dst',
inventory='hosts',user='ubuntu')
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/playbooks/')
self.assertEqual(resp.status_code, 200)
def test_view_playbook_list_url_accessible_by_name(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
def test_view_playbook_detail_url_accessible_by_name(self):
resp = self.client.get(reverse(
'ansible:playbook-detail', kwargs={'pk':1}))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('ansible:playbook-list'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'ansible/playbook_list.html')
|
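The new test resolves the detail route by name with a pk kwarg. The record does not include the project's urls.py; a URLconf along these lines (Django < 2.0 style, matching the django.core.urlresolvers import, with view names assumed) would satisfy both tests:

from django.conf.urls import url
from ansible import views   # hypothetical views module

app_name = 'ansible'
urlpatterns = [
    url(r'^playbooks/$', views.PlaybookList.as_view(),
        name='playbook-list'),
    url(r'^playbooks/(?P<pk>\d+)/$', views.PlaybookDetail.as_view(),
        name='playbook-detail'),
]
# reverse('ansible:playbook-detail', kwargs={'pk': 1}) -> '/playbooks/1/'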
e5776e8bd4e7ee73fea10788fd60d236abfbbfc3
|
docrepr/__init__.py
|
docrepr/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2012- Spyder Development team
#
# Licensed under the terms of the MIT or BSD Licenses
# (See every file for its license)
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2013- The Spyder Development team
#
# Licensed under the terms of the Modified BSD License
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
Fix license in init file
|
Fix license in init file
|
Python
|
bsd-3-clause
|
techtonik/docrepr,spyder-ide/docrepr,techtonik/docrepr,spyder-ide/docrepr,spyder-ide/docrepr,techtonik/docrepr
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2012- Spyder Development team
#
# Licensed under the terms of the MIT or BSD Licenses
# (See every file for its license)
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
Fix license in init file
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2013- The Spyder Development team
#
# Licensed under the terms of the Modified BSD License
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2012- Spyder Development team
#
# Licensed under the terms of the MIT or BSD Licenses
# (See every file for its license)
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
<commit_msg>Fix license in init file<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2013- The Spyder Development team
#
# Licensed under the terms of the Modified BSD License
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2012- Spyder Development team
#
# Licensed under the terms of the MIT or BSD Licenses
# (See every file for its license)
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
Fix license in init file# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2013- The Spyder Development team
#
# Licensed under the terms of the Modified BSD License
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2012- Spyder Development team
#
# Licensed under the terms of the MIT or BSD Licenses
# (See every file for its license)
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
<commit_msg>Fix license in init file<commit_after># -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Tim Dumol
# Copyright (c) 2013- The Spyder Development team
#
# Licensed under the terms of the Modified BSD License
"""
Docrepr library
Library to generate rich and plain representations of docstrings,
including several metadata of the object to which the docstring
belongs
Derived from spyderlib.utils.inspector and IPython.core.oinspect
"""
# Configuration options for docrepr
options = {
'render_math': True,
'local_mathjax': False,
'collapse_sections': False,
'use_qt4': False,
'outline': False
}
|
b99a8e2fe4a4d26b8b9dfbc4b3a9effad9c89f90
|
calexicon/dates/tests/test_bce.py
|
calexicon/dates/tests/test_bce.py
|
import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
|
import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
Add tests for the subtraction operator for BCEDate.
|
Add tests for the subtraction operator for BCEDate.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
Add tests for the subtraction operator for BCEDate.
|
import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
<commit_before>import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
<commit_msg>Add tests for the subtraction operator for BCEDate.<commit_after>
|
import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
Add tests for the subtraction operator for BCEDate.import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
<commit_before>import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
<commit_msg>Add tests for the subtraction operator for BCEDate.<commit_after>import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
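The expected deltas check out by hand: in the Julian calendar every fourth year is a leap year, and astronomical year -44 is divisible by 4, so February has 29 days and March 15 minus February 11 is 18 + 15 = 33 days. datetime cannot represent year -44, but any leap year reproduces the same arithmetic:

from datetime import date, timedelta

assert date(4, 3, 15) - date(4, 3, 11) == timedelta(days=4)
assert date(4, 3, 15) - date(4, 2, 11) == timedelta(days=33)  # 18 + 15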
bacab2b55907c6c263862c2e8d9e0a58f4fbfb29
|
mediacenter/tests/test_utils.py
|
mediacenter/tests/test_utils.py
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
['pouet.xxx', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
Add case of unknown extension when testing guess_kind
|
Add case of unknown extension when testing guess_kind
|
Python
|
agpl-3.0
|
Lcaracol/ideasbox.lan,ideascube/ideascube,ideascube/ideascube,Lcaracol/ideasbox.lan,ideascube/ideascube,ideascube/ideascube,Lcaracol/ideasbox.lan
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
Add case of unknown extension when testing guess_kind
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
['pouet.xxx', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
<commit_before>import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
<commit_msg>Add case of unknown extension when testing guess_kind<commit_after>
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
['pouet.xxx', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
Add case of unknown extension when testing guess_kind
import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
['pouet.xxx', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
<commit_before>import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
<commit_msg>Add case of unknown extension when testing guess_kind<commit_after>import pytest
from ..utils import guess_kind
@pytest.mark.parametrize('input,expected', [
['pouet.jpg', 'image'],
['pouet.jpeg', 'image'],
['pouet.png', 'image'],
['pouet.mp4', 'video'],
['pouet.avi', 'video'],
['pouet.mp3', 'audio'],
['pouet.ogg', 'audio'],
['pouet.pdf', 'pdf'],
['pouet', None],
['pouet.xxx', None],
])
def test_guess_kind(input, expected):
assert guess_kind(input) == expected
|
106868c0c4b3bb947d251a8416bbd3698af5948b
|
backend/session/permissions.py
|
backend/session/permissions.py
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
|
Fix IsStaffOrTargetUser permission when no user in request.
|
Fix IsStaffOrTargetUser permission when no user in request.
|
Python
|
mit
|
ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
Fix IsStaffOrTargetUser permission when no user in request.
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
|
<commit_before>
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
<commit_msg>Fix IsStaffOrTargetUser permission when no user in request.<commit_after>
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
|
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
Fix IsStaffOrTargetUser permission when no user in request.
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
|
<commit_before>
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
<commit_msg>Fix IsStaffOrTargetUser permission when no user in request.<commit_after>from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
|
6222bdca162da68f6a2906a2d73d6e79b6acfdc7
|
run.py
|
run.py
|
#!/usr/bin/python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
#!/usr/bin/env python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
Work apparently better in a virtualenv.
|
Work apparently better in a virtualenv.
|
Python
|
bsd-3-clause
|
batiste/django-rpg,batiste/django-rpg
|
#!/usr/bin/python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
Work apparently better in a virtualenv.
|
#!/usr/bin/env python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
<commit_before>#!/usr/bin/python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
<commit_msg>Work apparently better in a virtualenv.<commit_after>
|
#!/usr/bin/env python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
#!/usr/bin/python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
Work apparently better in a virtualenv.
#!/usr/bin/env python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
<commit_before>#!/usr/bin/python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
<commit_msg>Work apparently better in a virtualenv.<commit_after>#!/usr/bin/env python
from gevent import monkey; monkey.patch_all()
from gevent.wsgi import WSGIServer
import sys
import os
import traceback
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.signals import got_request_exception
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
def exception_printer(sender, **kwargs):
traceback.print_exc()
got_request_exception.connect(exception_printer)
call_command('syncdb')
print 'Serving on 8088...'
WSGIServer(('', 8088), WSGIHandler()).serve_forever()
|
e8175497157ed34f91b9ba96118c4e76cd3ed0e4
|
bmsmodules/Events.py
|
bmsmodules/Events.py
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, basestring):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, (basestring, int)):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
else:
self._events_[eventname](*args, **kwargs)
|
Add event execution, allow integers as event name
|
Add event execution, allow integers as event name
|
Python
|
bsd-3-clause
|
RenolY2/py-playBMS
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, basestring):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))Add event execution, allow integers as event name
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, (basestring, int)):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
else:
self._events_[eventname](*args, **kwargs)
|
<commit_before>from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, basestring):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))<commit_msg>Add event execution, allow integers as event name<commit_after>
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, (basestring, int)):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
else:
self._events_[eventname](*args, **kwargs)
|
from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, basestring):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))Add event execution, allow integers as event namefrom operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, (basestring, int)):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
else:
self._events_[eventname](*args, **kwargs)
|
<commit_before>from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, basestring):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))<commit_msg>Add event execution, allow integers as event name<commit_after>from operator import isCallable
class Events(object):
def __init__(self):
self._events_ = {}
def addEvent(self, eventname, func):
if not isCallable(func):
raise RuntimeError("func argument must be a function!")
elif not isinstance(eventname, (basestring, int)):
raise RuntimeError("Event name must be a string!")
elif eventname in self._events_:
raise RuntimeError("Event name already exists!")
else:
self._events_[eventname] = func
def execEvent(self, eventname, *args, **kwargs):
if eventname not in self._events_:
raise RuntimeError("No such Event name '{0}'".format(eventname))
else:
self._events_[eventname](*args, **kwargs)
|
6782e88a48a40dffead893f9fdb2ac0eb6dae7f4
|
datashape/error.py
|
datashape/error.py
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
print(str(self)) # REMOVEME
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
Remove the print from datashape
|
Remove the print from datashape
|
Python
|
bsd-2-clause
|
cowlicks/datashape,cpcloud/datashape,quantopian/datashape,ContinuumIO/datashape,llllllllll/datashape,quantopian/datashape,cpcloud/datashape,cowlicks/datashape,ContinuumIO/datashape,blaze/datashape,llllllllll/datashape,blaze/datashape
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
print(str(self)) # REMOVEME
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
Remove the print from datashape
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
<commit_before>"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
print(str(self)) # REMOVEME
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
<commit_msg>Remove the print from datashape<commit_after>
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
print(str(self)) # REMOVEME
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
Remove the print from datashape
"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
<commit_before>"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
print(str(self)) # REMOVEME
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
<commit_msg>Remove the print from datashape<commit_after>"""Error handling"""
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class DataShapeSyntaxError(SyntaxError):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
def __str__(self):
pointer = ' ' * self.col_offset + '^'
return syntax_error.format(
filename=self.filename,
lineno=self.lineno,
line=self.line,
pointer=pointer,
msg=self.msg,
error=self.__class__.__name__,
)
def __repr__(self):
return str(self)
|
2cae562dc84ba09d0c6a90cf5cde72fba05ac8e3
|
f5_cccl/__init__.py
|
f5_cccl/__init__.py
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
Add the proper spacing to the flake exception comment
|
Add the proper spacing to the flake exception comment
|
Python
|
apache-2.0
|
f5devcentral/f5-cccl,ryan-talley/f5-cccl,richbrowne/f5-cccl,ryan-talley/f5-cccl,f5devcentral/f5-cccl,richbrowne/f5-cccl
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
Add the proper spacing to the flake exception comment
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
<commit_before>#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
<commit_msg>Add the proper spacing to the flake exception comment<commit_after>
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
Add the proper spacing to the flake exception comment
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
<commit_before>#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
<commit_msg>Add the proper spacing to the flake exception comment<commit_after>#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""F5 Common Controller Core Library.
This module implements a Common Controller Core Library for use within other
libraries that need to read, diff and apply configurations to a BIG-IP.
"""
__version__ = '0.1.0'
from .api import F5CloudServiceManager # noqa: F401, F403
|
0916ed4903914ee46dbe4e451d367dff719c9a15
|
tests/example_project/urls.py
|
tests/example_project/urls.py
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib.admin import autodiscover
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
admin.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
('^admin/', include(admin.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
Test running both newman/ and admin/ - some templates still mixed.
|
Test running both newman/ and admin/ - some templates still mixed.
|
Python
|
bsd-3-clause
|
petrlosa/ella,ella/ella,whalerock/ella,WhiskeyMedia/ella,WhiskeyMedia/ella,petrlosa/ella,MichalMaM/ella,whalerock/ella,MichalMaM/ella,whalerock/ella
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib.admin import autodiscover
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
Test running both newman/ and admin/ - some templates still mixed.
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
admin.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
('^admin/', include(admin.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
<commit_before>from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib.admin import autodiscover
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
<commit_msg>Test running both newman/ and admin/ - some templates still mixed.<commit_after>
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
admin.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
('^admin/', include(admin.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib.admin import autodiscover
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
Test running both newman/ and admin/ - some templates still mixed.
from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
admin.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
('^admin/', include(admin.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
<commit_before>from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib.admin import autodiscover
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
<commit_msg>Test running both newman/ and admin/ - some templates still mixed.<commit_after>from os.path import dirname, join, normpath
import django
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
import ella
from ella import newman
from ella.utils import installedapps
newman.autodiscover()
admin.autodiscover()
installedapps.init_logger()
ADMIN_ROOTS = (
normpath(join(dirname(ella.__file__), 'newman', 'media')),
normpath(join(dirname(django.__file__), 'contrib', 'admin', 'media')),
)
urlpatterns = patterns('',
# serve admin media static files
(r'^static/newman_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
(r'^static/admin_media/(?P<path>.*)$', 'ella.utils.views.fallback_serve', {'document_roots': ADMIN_ROOTS}),
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# main admin urls
('^newman/', include(newman.site.urls)),
('^admin/', include(admin.site.urls)),
# reverse url lookups
(r'^', include('ella.core.urls')),
)
handler404 = 'ella.core.views.page_not_found'
handler500 = 'ella.core.views.handle_error'
|
98b66fd28d0651022a55fb3d32c69a533e395760
|
tests/test_get_user_config.py
|
tests/test_get_user_config.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
Remove self references from setup/teardown
|
Remove self references from setup/teardown
|
Python
|
bsd-3-clause
|
Vauxoo/cookiecutter,willingc/cookiecutter,Springerle/cookiecutter,lucius-feng/cookiecutter,agconti/cookiecutter,lucius-feng/cookiecutter,dajose/cookiecutter,atlassian/cookiecutter,dajose/cookiecutter,vintasoftware/cookiecutter,cguardia/cookiecutter,michaeljoseph/cookiecutter,tylerdave/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,nhomar/cookiecutter,venumech/cookiecutter,ramiroluz/cookiecutter,vintasoftware/cookiecutter,terryjbates/cookiecutter,drgarcia1986/cookiecutter,atlassian/cookiecutter,janusnic/cookiecutter,pjbull/cookiecutter,drgarcia1986/cookiecutter,moi65/cookiecutter,nhomar/cookiecutter,janusnic/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,agconti/cookiecutter,christabor/cookiecutter,jhermann/cookiecutter,willingc/cookiecutter,sp1rs/cookiecutter,0k/cookiecutter,sp1rs/cookiecutter,Vauxoo/cookiecutter,cichm/cookiecutter,Springerle/cookiecutter,ionelmc/cookiecutter,vincentbernat/cookiecutter,jhermann/cookiecutter,tylerdave/cookiecutter,stevepiercy/cookiecutter,ramiroluz/cookiecutter,cichm/cookiecutter,moi65/cookiecutter,luzfcb/cookiecutter,kkujawinski/cookiecutter,venumech/cookiecutter,hackebrot/cookiecutter,lgp171188/cookiecutter,christabor/cookiecutter,takeflight/cookiecutter,ionelmc/cookiecutter,benthomasson/cookiecutter,michaeljoseph/cookiecutter,foodszhang/cookiecutter,0k/cookiecutter,benthomasson/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,foodszhang/cookiecutter,lgp171188/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,kkujawinski/cookiecutter,takeflight/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
Remove self references from setup/teardown
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
<commit_msg>Remove self references from setup/teardown<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
Remove self references from setup/teardown
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
<commit_msg>Remove self references from setup/teardown<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
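The record above ports a unittest-style setUp/tearDown pair to a pytest fixture built on request.addfinalizer. The same backup/restore logic is more commonly written today as a yield fixture; a minimal runnable sketch, with the rc path taken from the record and everything else illustrative:

import os
import shutil

import pytest

RC_PATH = os.path.expanduser('~/.cookiecutterrc')
RC_BACKUP = RC_PATH + '.backup'

@pytest.fixture
def back_up_rc():
    # Setup: move any pre-existing rc file out of the way.
    if os.path.exists(RC_PATH):
        shutil.copy(RC_PATH, RC_BACKUP)
        os.remove(RC_PATH)
    yield  # the test body runs here
    # Teardown: restore the original rc file if one was backed up.
    if os.path.exists(RC_BACKUP):
        shutil.copy(RC_BACKUP, RC_PATH)
        os.remove(RC_BACKUP)

def test_rc_is_absent(back_up_rc):
    assert not os.path.exists(RC_PATH)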
2a322d26d4ed299d21a1b931e03311ff02a23e0f
|
app/status/views/healthcheck.py
|
app/status/views/healthcheck.py
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client, version
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(status="ok", api=api_status, git_commit=version.__git_commit__, build_time=version.__time__), 200
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(
status="ok",
api=api_status), 200
|
Remove git commit/version from status endpoint
|
Remove git commit/version from status endpoint
This is temporary for the purpose of getting running in
Docker with minimal build steps.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client, version
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(status="ok", api=api_status, git_commit=version.__git_commit__, build_time=version.__time__), 200
Remove git commit/version from status endpoint
This is temporary for the purpose of getting running in
Docker with minimal build steps.
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(
status="ok",
api=api_status), 200
|
<commit_before>from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client, version
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(status="ok", api=api_status, git_commit=version.__git_commit__, build_time=version.__time__), 200
<commit_msg>Remove git commit/version from status endpoint
This is temporary for the purpose of getting running in
Docker with minimal build steps.<commit_after>
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(
status="ok",
api=api_status), 200
|
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client, version
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(status="ok", api=api_status, git_commit=version.__git_commit__, build_time=version.__time__), 200
Remove git commit/version from status endpoint
This is temporary for the purpose of getting running in
Docker with minimal build steps.
from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(
status="ok",
api=api_status), 200
|
<commit_before>from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client, version
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(status="ok", api=api_status, git_commit=version.__git_commit__, build_time=version.__time__), 200
<commit_msg>Remove git commit/version from status endpoint
This is temporary for the purpose of getting running in
Docker with minimal build steps.<commit_after>from flask import current_app, jsonify, request
from notifications_python_client.errors import HTTPError
from app import status_api_client
from app.status import status
@status.route("/_status", methods=["GET"])
def show_status():
if request.args.get("elb", None) or request.args.get("simple", None):
return jsonify(status="ok"), 200
else:
try:
api_status = status_api_client.get_status()
except HTTPError as e:
current_app.logger.exception("API failed to respond")
return jsonify(status="error", message=str(e.message)), 500
return jsonify(
status="ok",
api=api_status), 200
|
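A self-contained sketch of the trimmed status endpoint from this record, with a stub standing in for status_api_client so it runs outside the real app (the stub and its payload are illustrative; Flask is assumed installed):

from flask import Flask, jsonify, request

app = Flask(__name__)

class StubStatusClient:
    """Stand-in for the real status_api_client."""
    def get_status(self):
        return {'db': 'ok'}

status_api_client = StubStatusClient()

@app.route('/_status', methods=['GET'])
def show_status():
    # Load-balancer probes pass ?elb=1 or ?simple=1 and skip the API call.
    if request.args.get('elb') or request.args.get('simple'):
        return jsonify(status='ok'), 200
    try:
        api_status = status_api_client.get_status()
    except Exception as e:
        return jsonify(status='error', message=str(e)), 500
    return jsonify(status='ok', api=api_status), 200

if __name__ == '__main__':
    with app.test_client() as client:
        print(client.get('/_status?simple=1').get_json())  # {'status': 'ok'}
        print(client.get('/_status').get_json())           # adds the 'api' key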
b2d9b27b383c716ef3f15c96f6627837ffd1751e
|
app/modals/user.py
|
app/modals/user.py
|
from app import db
class Users(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
from config import db
class User(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
Add db from config to modals
|
Add db from config to modals
|
Python
|
mit
|
tforrest/soda-automation,tforrest/soda-automation
|
from app import db
class Users(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
Add db from config to modals
|
from config import db
class User(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
<commit_before>from app import db
class Users(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
<commit_msg>Add db from config to modals<commit_after>
|
from config import db
class User(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
from app import db
class Users(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
Add db from config to modals
from config import db
class User(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
<commit_before>from app import db
class Users(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
<commit_msg>Add db from config to modals<commit_after>from config import db
class User(db.Modal):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), nullable=False)
asu_id = db.Column(db.Integer,nullable=False)
class_standing = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100))
phone_number = db.Column(db.String(100))
def __init__(self,name,asu_id,class_standing,email,phone_number):
self.name = name
self.asu_id = asu_id
self.class_standing = class_standing
self.email = email
self.phone_number = phone_number
def __repr__(self):
return "User's name: %s" % self.name
|
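One detail worth flagging in this record: Flask-SQLAlchemy's declarative base is db.Model, so the db.Modal spelling in both the before and after states would raise AttributeError at import time. A runnable sketch of the corrected pattern, against in-memory SQLite and with the field list abbreviated:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
db = SQLAlchemy(app)

class User(db.Model):  # db.Model, not db.Modal
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    asu_id = db.Column(db.Integer, nullable=False)

    def __repr__(self):
        return "User's name: %s" % self.name

with app.app_context():
    db.create_all()
    db.session.add(User(name='Ada', asu_id=1204663))
    db.session.commit()
    print(User.query.all())  # [User's name: Ada]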
72fcff2c4bb0b6823aef66e2c4a43e090e1fa38c
|
toolkit/devserver_settings.py
|
toolkit/devserver_settings.py
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# MEDIA_ROOT = '/var/www_toolkit/site/media'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
import warnings
warnings.filterwarnings('error', r"DateTimeField received a naive datetime",
RuntimeWarning, r'django\.db\.models\.fields')
|
Throw exceptions on datetimefield errors in debug mode
|
Throw exceptions on datetimefield errors in debug mode
|
Python
|
agpl-3.0
|
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
Throw exceptions on datetimefield errors in debug mode
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# MEDIA_ROOT = '/var/www_toolkit/site/media'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
import warnings
warnings.filterwarnings('error', r"DateTimeField received a naive datetime",
RuntimeWarning, r'django\.db\.models\.fields')
|
<commit_before>import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
<commit_msg>Throw exceptions on datetimefield errors in debug mode<commit_after>
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# MEDIA_ROOT = '/var/www_toolkit/site/media'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
import warnings
warnings.filterwarnings('error', r"DateTimeField received a naive datetime",
RuntimeWarning, r'django\.db\.models\.fields')
|
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
Throw exceptions on datetimefield errors in debug mode
import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# MEDIA_ROOT = '/var/www_toolkit/site/media'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
import warnings
warnings.filterwarnings('error', r"DateTimeField received a naive datetime",
RuntimeWarning, r'django\.db\.models\.fields')
|
<commit_before>import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
<commit_msg>Throw exceptions on datetimefield errors in debug mode<commit_after>import os.path
import logging
import logging.config
from toolkit.settings_common import *
APP_ROOT = '/home/ben/data/python/cube'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://toolkit/media/'
# MEDIA_ROOT = '/var/www_toolkit/site/media'
# Enable Debug mode, add in Django toolbar:
DEBUG = True
# Django toolbar things:
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {'INTERCEPT_REDIRECTS': False, }
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('debug_toolbar')
# Enable logging to the console:
logging.basicConfig(
# level = logging.DEBUG,
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
)
import warnings
warnings.filterwarnings('error', r"DateTimeField received a naive datetime",
RuntimeWarning, r'django\.db\.models\.fields')
|
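The warnings.filterwarnings('error', ...) line added in this record escalates Django's naive-datetime RuntimeWarning into an exception, so offending code fails loudly in development instead of logging a warning. The mechanism is plain stdlib and can be shown without Django; the message text mirrors Django's, and the module filter is dropped for brevity:

import warnings

warnings.filterwarnings('error', r'DateTimeField received a naive datetime',
                        RuntimeWarning)

try:
    warnings.warn('DateTimeField received a naive datetime while time zone '
                  'support is active.', RuntimeWarning)
except RuntimeWarning as e:
    print('now raises instead of printing:', e)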
7898e0aea72313b769e0c42eea961319539f543b
|
apps/contribution/serializers.py
|
apps/contribution/serializers.py
|
from rest_framework import serializers
from apps.contribution.models import Repository
class RepositorySerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at')
|
from rest_framework import serializers
from apps.contribution.models import Repository, RepositoryLanguage
class RepositoryLanguagesSerializer(serializers.ModelSerializer):
class Meta(object):
model = RepositoryLanguage
fields = ('type', 'size')
class RepositorySerializer(serializers.ModelSerializer):
languages = RepositoryLanguagesSerializer(many=True)
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at', 'languages')
|
Add languages to api endpoint
|
Add languages to api endpoint
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
from rest_framework import serializers
from apps.contribution.models import Repository
class RepositorySerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at')
Add languages to api endpoint
|
from rest_framework import serializers
from apps.contribution.models import Repository, RepositoryLanguage
class RepositoryLanguagesSerializer(serializers.ModelSerializer):
class Meta(object):
model = RepositoryLanguage
fields = ('type', 'size')
class RepositorySerializer(serializers.ModelSerializer):
languages = RepositoryLanguagesSerializer(many=True)
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at', 'languages')
|
<commit_before>from rest_framework import serializers
from apps.contribution.models import Repository
class RepositorySerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at')
<commit_msg>Add languages to api endpoint<commit_after>
|
from rest_framework import serializers
from apps.contribution.models import Repository, RepositoryLanguage
class RepositoryLanguagesSerializer(serializers.ModelSerializer):
class Meta(object):
model = RepositoryLanguage
fields = ('type', 'size')
class RepositorySerializer(serializers.ModelSerializer):
languages = RepositoryLanguagesSerializer(many=True)
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at', 'languages')
|
from rest_framework import serializers
from apps.contribution.models import Repository
class RepositorySerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at')
Add languages to api endpoint
from rest_framework import serializers
from apps.contribution.models import Repository, RepositoryLanguage
class RepositoryLanguagesSerializer(serializers.ModelSerializer):
class Meta(object):
model = RepositoryLanguage
fields = ('type', 'size')
class RepositorySerializer(serializers.ModelSerializer):
languages = RepositoryLanguagesSerializer(many=True)
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at', 'languages')
|
<commit_before>from rest_framework import serializers
from apps.contribution.models import Repository
class RepositorySerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at')
<commit_msg>Add languages to api endpoint<commit_after>from rest_framework import serializers
from apps.contribution.models import Repository, RepositoryLanguage
class RepositoryLanguagesSerializer(serializers.ModelSerializer):
class Meta(object):
model = RepositoryLanguage
fields = ('type', 'size')
class RepositorySerializer(serializers.ModelSerializer):
languages = RepositoryLanguagesSerializer(many=True)
class Meta:
model = Repository
fields = ('id', 'name', 'description', 'url', 'updated_at', 'languages')
|
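A minimal sketch of the nested many=True serializer pattern from this record, runnable without a database by feeding plain dicts to non-model serializers; settings.configure() stands in for a real Django project, and Django REST Framework is assumed installed:

import django
from django.conf import settings

settings.configure()  # DRF reads Django settings lazily; defaults suffice here
django.setup()

from rest_framework import serializers

class RepositoryLanguagesSerializer(serializers.Serializer):
    type = serializers.CharField()
    size = serializers.IntegerField()

class RepositorySerializer(serializers.Serializer):
    name = serializers.CharField()
    languages = RepositoryLanguagesSerializer(many=True)

repo = {'name': 'onlineweb4',
        'languages': [{'type': 'Python', 'size': 120466}]}
print(RepositorySerializer(repo).data)
# prints the same structure back (an OrderedDict in older DRF versions)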
ac8ab5a191de399477ce7693307ef1e114e841c6
|
base_kanban_stage_state/__manifest__.py
|
base_kanban_stage_state/__manifest__.py
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
Fix license in manifest file
|
Fix license in manifest file
|
Python
|
agpl-3.0
|
ovnicraft/server-tools,ovnicraft/server-tools,thinkopensolutions/server-tools,thinkopensolutions/server-tools,ovnicraft/server-tools
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
Fix license in manifest file
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
<commit_msg>Fix license in manifest file<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
Fix license in manifest file
# -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
<commit_msg>Fix license in manifest file<commit_after># -*- coding: utf-8 -*-
# Copyright 2017 Specialty Medical Drugstore
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name": "Base Kanban Stage State",
"summary": "Maps stages from base_kanban_stage to states",
"version": "10.0.1.0.0",
"category": "Base",
"website": "https://odoo-community.org/",
"author": "SMDrugstore, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"base_kanban_stage",
],
"data": [
"views/base_kanban_stage_state_view.xml",
],
}
|
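The mismatch fixed here (the header comment says LGPL-3.0 while the license key said AGPL-3) is easy to catch mechanically, because an Odoo manifest is a single Python dict literal. A small sketch of such a check; the allow-list below is abbreviated, as Odoo accepts more keys than these:

import ast

ALLOWED = {'AGPL-3', 'LGPL-3', 'GPL-3', 'OPL-1', 'OEEL-1'}

def manifest_license(path):
    # Odoo manifests hold one dict literal, so literal_eval is sufficient.
    with open(path) as f:
        manifest = ast.literal_eval(f.read())
    licence = manifest.get('license', 'LGPL-3')  # Odoo's documented default
    if licence not in ALLOWED:
        raise ValueError('unexpected license key: %r' % licence)
    return licence

# manifest_license('base_kanban_stage_state/__manifest__.py') -> 'LGPL-3'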
52c5b5823a4d808c96f525c95df7e269d6db2a98
|
astrobin_apps_donations/utils.py
|
astrobin_apps_donations/utils.py
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
Fix checking whether user is donor.
|
Fix checking whether user is donor.
|
Python
|
agpl-3.0
|
astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
Fix checking whether user is donor.
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
<commit_before>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
<commit_msg>Fix checking whether user is donor.<commit_after>
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
Fix checking whether user is donor.
from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
<commit_before>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
<commit_msg>Fix checking whether user is donor.<commit_after>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
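The one-character change in this record matters because on the Django versions this code targets, is_authenticated is a method, and a bound method is always truthy, so the unparenthesised check passes even for anonymous users (Django 1.10+ turned it into a property-like CallableBool precisely to defuse this). A stdlib-only demonstration:

class OldStyleUser:
    def is_authenticated(self):  # a method, as in Django < 1.10
        return False             # e.g. an anonymous user

user = OldStyleUser()
print(bool(user.is_authenticated))    # True: the bound method itself is truthy
print(bool(user.is_authenticated()))  # False: the actual answer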
fb59c2c7c01da9f4040c6b9c818d1fe2fc7993bb
|
get_weather_data.py
|
get_weather_data.py
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
Fix bug with data frames
|
Fix bug with data frames
|
Python
|
mit
|
tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
Fix bug with data frames
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
<commit_before># get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
<commit_msg>Fix bug with data frames<commit_after>
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
Fix bug with data frames
# get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
<commit_before># get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
<commit_msg>Fix bug with data frames<commit_after># get_weather_data.py
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
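The bug fixed in this record wrote daily_df into all four tables; the fix builds one DataFrame per payload and zips frames with table names so each lands in its own table. A reduced runnable sketch of that pairing, using in-memory SQLite (pandas and SQLAlchemy assumed installed):

import pandas as pd
from sqlalchemy import create_engine

engine = create_engine('sqlite:///:memory:')

current_df = pd.DataFrame([{'temp': 71.2}])
daily_df = pd.DataFrame([{'high': 80, 'low': 60}])

tables = ['current_weather', 'daily_weather']
data_to_import = [current_df, daily_df]

# zip() pairs each frame with its own table name.
for data, table in zip(data_to_import, tables):
    data.to_sql(table, con=engine, if_exists='append', index=False)

print(pd.read_sql('SELECT * FROM current_weather', engine))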
f2da30eb43a10c3e44d3e9b8d77ddb146ad88a0f
|
python/src/setup.py
|
python/src/setup.py
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.5",
"GoogleAppEnginePipeline >= 1.9.5.1"
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
Move MR and Pipelines to 1.9.15.0 - Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
|
Move MR and Pipelines to 1.9.15.0
- Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7173
|
Python
|
apache-2.0
|
westerhofffl/appengine-mapreduce,chargrizzle/appengine-mapreduce,talele08/appengine-mapreduce,bmenasha/appengine-mapreduce,rbruyere/appengine-mapreduce,vendasta/appengine-mapreduce,talele08/appengine-mapreduce,lordzuko/appengine-mapreduce,vendasta/appengine-mapreduce,bmenasha/appengine-mapreduce,VirusTotal/appengine-mapreduce,chargrizzle/appengine-mapreduce,vendasta/appengine-mapreduce,VirusTotal/appengine-mapreduce,lordzuko/appengine-mapreduce,aozarov/appengine-mapreduce,chargrizzle/appengine-mapreduce,talele08/appengine-mapreduce,Candreas/mapreduce,talele08/appengine-mapreduce,lordzuko/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,vendasta/appengine-mapreduce,mikelambert/appengine-mapreduce,westerhofffl/appengine-mapreduce,potatolondon/potato-mapreduce,rbruyere/appengine-mapreduce,soundofjw/appengine-mapreduce,ankit318/appengine-mapreduce,ankit318/appengine-mapreduce,westerhofffl/appengine-mapreduce,Candreas/mapreduce,ankit318/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,rbruyere/appengine-mapreduce,aozarov/appengine-mapreduce,mikelambert/appengine-mapreduce,ankit318/appengine-mapreduce,bmenasha/appengine-mapreduce,rbruyere/appengine-mapreduce,potatolondon/potato-mapreduce,aozarov/appengine-mapreduce,soundofjw/appengine-mapreduce,vendasta/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,westerhofffl/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,Candreas/mapreduce,talele08/appengine-mapreduce,mikelambert/appengine-mapreduce,lordzuko/appengine-mapreduce,bmenasha/appengine-mapreduce,ankit318/appengine-mapreduce,aozarov/appengine-mapreduce,aozarov/appengine-mapreduce,lordzuko/appengine-mapreduce,VirusTotal/appengine-mapreduce,chargrizzle/appengine-mapreduce,westerhofffl/appengine-mapreduce,mikelambert/appengine-mapreduce,soundofjw/appengine-mapreduce,VirusTotal/appengine-mapreduce,bmenasha/appengine-mapreduce,VirusTotal/appengine-mapreduce,potatolondon/potato-mapreduce,soundofjw/appengine-mapreduce,mikelambert/appengine-mapreduce,Candreas/mapreduce,Candreas/mapreduce,soundofjw/appengine-mapreduce,chargrizzle/appengine-mapreduce,rbruyere/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.5",
"GoogleAppEnginePipeline >= 1.9.5.1"
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
Move MR and Pipelines to 1.9.15.0
- Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7173
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.5",
"GoogleAppEnginePipeline >= 1.9.5.1"
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
<commit_msg>Move MR and Pipelines to 1.9.15.0
- Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7173<commit_after>
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.5",
"GoogleAppEnginePipeline >= 1.9.5.1"
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
Move MR and Pipelines to 1.9.15.0
- Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7173#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.5",
"GoogleAppEnginePipeline >= 1.9.5.1"
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
<commit_msg>Move MR and Pipelines to 1.9.15.0
- Slight tweaks to requirements.txt for pipeline to add GCS Client as a dep.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7173<commit_after>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
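A side note on the setup.py record above: the pre-commit install_requires has no comma after the GoogleAppEnginePipeline entry, so Python's implicit concatenation of adjacent string literals fuses it with the Graphy line into a single, unsatisfiable requirement; the 1.9.15.0 bump also restores that comma. A minimal sketch of the pitfall (requirement strings copied from the record, the rest illustrative):

# Adjacent string literals concatenate implicitly, so the missing comma
# yields two requirements instead of three.
broken = [
    "GoogleAppEngineCloudStorageClient >= 1.9.5",
    "GoogleAppEnginePipeline >= 1.9.5.1"  # <- no trailing comma here
    "Graphy >= 1.0.0",
]
assert len(broken) == 2
assert broken[1] == "GoogleAppEnginePipeline >= 1.9.5.1Graphy >= 1.0.0"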
a1ff0c90072973333aaa7eb246cd754edca7731f
|
byceps/services/brand/dbmodels/brand.py
|
byceps/services/brand/dbmodels/brand.py
|
"""
byceps.services.dbbrand.models.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
"""
byceps.services.brand.dbmodels.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
Fix module name in module docstring
|
Fix module name in module docstring
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
byceps.services.dbbrand.models.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
Fix module name in module docstring
|
"""
byceps.services.brand.dbmodels.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
<commit_before>"""
byceps.services.dbbrand.models.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
<commit_msg>Fix module name in module docstring<commit_after>
|
"""
byceps.services.brand.dbmodels.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
"""
byceps.services.dbbrand.models.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
Fix module name in module docstring"""
byceps.services.brand.dbmodels.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
<commit_before>"""
byceps.services.dbbrand.models.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
<commit_msg>Fix module name in module docstring<commit_after>"""
byceps.services.brand.dbmodels.brand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....typing import BrandID
from ....util.instances import ReprBuilder
class Brand(db.Model):
"""A party brand."""
__tablename__ = 'brands'
id = db.Column(db.UnicodeText, primary_key=True)
title = db.Column(db.UnicodeText, unique=True, nullable=False)
image_filename = db.Column(db.UnicodeText, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self,
brand_id: BrandID,
title: str,
*,
image_filename: Optional[str] = None,
) -> None:
self.id = brand_id
self.title = title
self.image_filename = image_filename
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.build()
|
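For reference, a construction sketch for the Brand model in the record above. The identifier and title are invented, BrandID is assumed to be the NewType-style alias from byceps.typing, and in practice this would run inside the application's database context:

# Hypothetical values; image_filename is keyword-only, as defined in the record.
brand = Brand(BrandID('acme'), 'ACME Events', image_filename='acme.png')
print(repr(brand))  # repr assembled from the id via ReprBuilder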
fddd30a01f3d7b3a6e4e125919e3fc607980fded
|
btcx/__init__.py
|
btcx/__init__.py
|
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
import btce
import mtgox
import cfgmanager
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
Support for `import btcx; btcx.btce; ...`
|
Support for `import btcx; btcx.btce; ...`
|
Python
|
mit
|
knowitnothing/btcx,knowitnothing/btcx
|
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
Support for `import btcx; btcx.btce; ...`
|
import btce
import mtgox
import cfgmanager
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
<commit_before>__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
<commit_msg>Support for `import btcx; btcx.btce; ...`<commit_after>
|
import btce
import mtgox
import cfgmanager
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
Support for `import btcx; btcx.btce; ...`
import btce
import mtgox
import cfgmanager
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
<commit_before>__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
<commit_msg>Support for `import btcx; btcx.btce; ...`<commit_after>
import btce
import mtgox
import cfgmanager
__version__ = "0.0.1"
VERSION = (0, 0, 1, "handle-with-care")
|
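The btcx commit above exists purely so the submodules are reachable as package attributes. A hypothetical usage sketch under that commit (Python 2, since __init__.py relies on implicit relative imports; nothing is called on the submodules because their APIs are not shown in the record):

import btcx

print(btcx.__version__)  # "0.0.1"
btcx.btce        # exchange modules now accessible without separate imports
btcx.mtgox
btcx.cfgmanager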
3a8c738d8696f31f7024691d56b5edc411289b1b
|
registries/views.py
|
registries/views.py
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
"""
get:
Return a list of all registered drilling organizations
post:
Create a new drilling organization instance
"""
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
"""
get:
Return the specified drilling organization
patch:
Updates the specified drilling organization with the fields/values provided in the request body
delete:
Removes the specified drilling organization record
"""
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
Add docstrings to view classes
|
Add docstrings to view classes
|
Python
|
apache-2.0
|
bcgov/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells,rstens/gwells,rstens/gwells,bcgov/gwells,rstens/gwells
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
Add docstrings to view classes
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
"""
get:
Return a list of all registered drilling organizations
post:
Create a new drilling organization instance
"""
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
"""
get:
Return the specified drilling organization
patch:
Updates the specified drilling organization with the fields/values provided in the request body
delete:
Removes the specified drilling organization record
"""
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
<commit_before>from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
<commit_msg>Add docstrings to view classes<commit_after>
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
"""
get:
Return a list of all registered drilling organizations
post:
Create a new drilling organization instance
"""
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
"""
get:
Return the specified drilling organization
patch:
Updates the specified drilling organization with the fields/values provided in the request body
delete:
Removes the specified drilling organization record
"""
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
Add docstrings to view classesfrom django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
"""
get:
Return a list of all registered drilling organizations
post:
Create a new drilling organization instance
"""
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
"""
get:
Return the specified drilling organization
patch:
Updates the specified drilling organization with the fields/values provided in the request body
delete:
Removes the specified drilling organization record
"""
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
<commit_before>from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
<commit_msg>Add docstrings to view classes<commit_after>from django.http import HttpResponse
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.response import Response
from registries.models import Organization
from registries.serializers import DrillerListSerializer, DrillerSerializer
class APIDrillerListCreateView(ListCreateAPIView):
"""
get:
Return a list of all registered drilling organizations
post:
Create a new drilling organization instance
"""
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerSerializer
def list(self, request):
queryset = self.get_queryset()
serializer = DrillerListSerializer(queryset, many=True)
return Response(serializer.data)
class APIDrillerRetrieveUpdateDestroyView(RetrieveUpdateDestroyAPIView):
"""
get:
Return the specified drilling organization
patch:
Updates the specified drilling organization with the fields/values provided in the request body
delete:
Removes the specified drilling organization record
"""
queryset = Organization.objects.all()
lookup_field = "org_guid"
serializer_class = DrillerSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
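In the registries record above, list() is overridden so the collection endpoint renders with the lighter DrillerListSerializer while create and detail operations keep DrillerSerializer. A sketch of an equivalent DRF idiom, assuming the same model and serializers from the record; get_serializer_class is a standard GenericAPIView hook:

from rest_framework.generics import ListCreateAPIView

class APIDrillerListCreateView(ListCreateAPIView):
    queryset = Organization.objects.all().select_related('province_state')

    def get_serializer_class(self):
        # Light serializer for GET (listing), full serializer for POST (create).
        if self.request.method == 'GET':
            return DrillerListSerializer
        return DrillerSerializer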
1af9ad69ff57d43fa009967a2afd31aa4a610b00
|
helpers/__init__.py
|
helpers/__init__.py
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib']
def get_inc_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
"""Return the library path for SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib','/usr/local/lib']
def get_inc_dir():
"""Return the include directories for the SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
Fix spacing. Add docstrings to helpers
|
Fix spacing. Add docstrings to helpers
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,CTPUG/pygame_cffi,CTPUG/pygame_cffi
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib']
def get_inc_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
Fix spacing. Add docstrings to helpers
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
"""Return the library path for SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib','/usr/local/lib']
def get_inc_dir():
"""Return the include directories for the SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
<commit_before>import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib']
def get_inc_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
<commit_msg>Fix spacing. Add docstrings to helpers<commit_after>
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
"""Return the library path for SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib','/usr/local/lib']
def get_inc_dir():
"""Return the include directories for the SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib']
def get_inc_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
Fix spacing. Add docstrings to helpersimport os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
"""Return the library path for SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib','/usr/local/lib']
def get_inc_dir():
"""Return the include directories for the SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
<commit_before>import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib']
def get_inc_dir():
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
<commit_msg>Fix spacing. Add docstrings to helpers<commit_after>import os
import sys
import platform
# Various helpers for the build scripts
def get_lib_dir():
"""Return the library path for SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/lib']
else:
# 64 bit
return ['prebuilt-x64/lib']
return ['/usr/lib','/usr/local/lib']
def get_inc_dir():
"""Return the include directories for the SDL and other libraries.
Assumes we're using the pygame prebuilt zipfile on windows"""
if sys.platform.startswith("win"):
if platform.architecture()[0] == '32bit':
# 32 bit
return ['prebuilt-x86/include', 'prebuilt-x86/include/SDL']
else:
return ['prebuilt-x64/include', 'prebuilt-x64/include/SDL']
return ['/usr/include', '/usr/include/SDL', '/usr/local/include/SDL']
def get_c_lib(name):
"""Return the contents of a C library."""
filename = os.path.join(
os.path.dirname(__file__), '..', 'cffi_builders', 'lib', name)
with open(filename) as lib:
return lib.read()
__all__ = [get_inc_dir, get_lib_dir, get_c_lib]
|
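One latent quirk in both versions of the helpers record above: __all__ is populated with the function objects themselves rather than their names, so "from helpers import *" would fail with a TypeError (Python expects __all__ to hold strings). The conventional form, for reference:

__all__ = ['get_inc_dir', 'get_lib_dir', 'get_c_lib']  # names as strings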
e03cf2206733dc9f005375abef78238cf4011b50
|
dashi/config.py
|
dashi/config.py
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
@property
def first_name(self):
return self.config['name'].partition(' ')[0]
def __str__(self):
return 'User {}'.format(self.config['name'])
def __repr__(self):
return str(self)
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
def get_user(config, username):
matches = []
for user in config['users']:
for alias in user.aliases:
if username in alias and user not in matches:
matches.append(user)
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise Exception("Username '{}' matched {}".format(username, ', '.join([m['name'] for m in matches])))
else:
raise Exception("Unable to match user '{}'".format(username))
|
Add the ability to get users and represent them
|
Add the ability to get users and represent them
Also added a handy first name property for easy table display
|
Python
|
mit
|
EliRibble/dashi,EliRibble/dashi
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
Add the ability to get users and represent them
Also added a handy first name property for easy table display
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
@property
def first_name(self):
return self.config['name'].partition(' ')[0]
def __str__(self):
return 'User {}'.format(self.config['name'])
def __repr__(self):
return str(self)
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
def get_user(config, username):
matches = []
for user in config['users']:
for alias in user.aliases:
if username in alias and user not in matches:
matches.append(user)
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise Exception("Username '{}' matched {}".format(username, ', '.join([m['name'] for m in matches])))
else:
raise Exception("Unable to match user '{}'".format(username))
|
<commit_before>import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
<commit_msg>Add the ability to get users and represent them
Also added a handy first name property for easy table display<commit_after>
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
@property
def first_name(self):
return self.config['name'].partition(' ')[0]
def __str__(self):
return 'User {}'.format(self.config['name'])
def __repr__(self):
return str(self)
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
def get_user(config, username):
matches = []
for user in config['users']:
for alias in user.aliases:
if username in alias and user not in matches:
matches.append(user)
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise Exception("Username '{}' matched {}".format(username, ', '.join([m['name'] for m in matches])))
else:
raise Exception("Unable to match user '{}'".format(username))
|
import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
Add the ability to get users and represent them
Also added a handy first name property for easy table displayimport json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
@property
def first_name(self):
return self.config['name'].partition(' ')[0]
def __str__(self):
return 'User {}'.format(self.config['name'])
def __repr__(self):
return str(self)
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
def get_user(config, username):
matches = []
for user in config['users']:
for alias in user.aliases:
if username in alias and user not in matches:
matches.append(user)
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise Exception("Username '{}' matched {}".format(username, ', '.join([m['name'] for m in matches])))
else:
raise Exception("Unable to match user '{}'".format(username))
|
<commit_before>import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
<commit_msg>Add the ability to get users and represent them
Also added a handy first name property for easy table display<commit_after>import json
import logging
import os
LOGGER = logging.getLogger(__name__)
class User():
def __init__(self, config):
self.config = config
@property
def aliases(self):
return [self.config['name']] + self.config.get('aliases', [])
@property
def first_name(self):
return self.config['name'].partition(' ')[0]
def __str__(self):
return 'User {}'.format(self.config['name'])
def __repr__(self):
return str(self)
def _load_config():
for path in ['dashi.conf', os.path.join(os.environ['HOME'], '.dashi'), '/etc/dashi.conf']:
try:
with open(path, 'r') as f:
return json.load(f)
except FileNotFoundError:
LOGGER.info("Unable to read config file at %s", path)
except ValueError as e:
LOGGER.warning("Failed to parse config file %s: %s", path, e)
raise Exception("Unable to load any configuration files")
def parse():
config = _load_config()
config['users'] = [User(c) for c in config['users']]
return config
def get_user(config, username):
matches = []
for user in config['users']:
for alias in user.aliases:
if username in alias and user not in matches:
matches.append(user)
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise Exception("Username '{}' matched {}".format(username, ', '.join([m['name'] for m in matches])))
else:
raise Exception("Unable to match user '{}'".format(username))
|
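A hypothetical usage sketch for the dashi helpers added above; the username is invented, and matching is substring-based across each user's name plus aliases. One caveat visible in the record: the ambiguous-match branch formats m['name'] on User instances, which define no __getitem__, so that error path would itself raise a TypeError; m.config['name'] appears to be the intent.

config = parse()                # reads dashi.conf, ~/.dashi or /etc/dashi.conf
user = get_user(config, 'eli')  # must match exactly one user, else it raises
print(user.first_name)          # configured name up to the first space
print(user)                     # "User <name>" via __str__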
8b538c452242050e468b71ca937e3d4feb57887b
|
mopidy/backends/stream/__init__.py
|
mopidy/backends/stream/__init__.py
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.STREAM_PROTOCOLS`
"""
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return '[ext.stream]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.stream]
# If the stream extension should be enabled or not
enabled = true
# Whitelist of URI schemas to support streaming from
protocols =
http
https
mms
rtmp
rtmps
rtsp
"""
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['protocols'] = config.List()
return schema
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
Add default config and config schema
|
stream: Add default config and config schema
|
Python
|
apache-2.0
|
tkem/mopidy,jcass77/mopidy,jmarsik/mopidy,ZenithDK/mopidy,swak/mopidy,vrs01/mopidy,diandiankan/mopidy,quartz55/mopidy,adamcik/mopidy,abarisain/mopidy,liamw9534/mopidy,vrs01/mopidy,tkem/mopidy,dbrgn/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,abarisain/mopidy,glogiotatidis/mopidy,hkariti/mopidy,mopidy/mopidy,mokieyue/mopidy,mopidy/mopidy,vrs01/mopidy,ali/mopidy,ZenithDK/mopidy,tkem/mopidy,SuperStarPL/mopidy,hkariti/mopidy,ali/mopidy,glogiotatidis/mopidy,adamcik/mopidy,jmarsik/mopidy,kingosticks/mopidy,ZenithDK/mopidy,kingosticks/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,priestd09/mopidy,rawdlite/mopidy,bacontext/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,hkariti/mopidy,mopidy/mopidy,kingosticks/mopidy,diandiankan/mopidy,mokieyue/mopidy,rawdlite/mopidy,swak/mopidy,priestd09/mopidy,ali/mopidy,bacontext/mopidy,bencevans/mopidy,jodal/mopidy,quartz55/mopidy,mokieyue/mopidy,jodal/mopidy,bencevans/mopidy,quartz55/mopidy,quartz55/mopidy,dbrgn/mopidy,mokieyue/mopidy,jmarsik/mopidy,rawdlite/mopidy,bencevans/mopidy,vrs01/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,hkariti/mopidy,rawdlite/mopidy,priestd09/mopidy,woutervanwijk/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,bacontext/mopidy,dbrgn/mopidy,jodal/mopidy,swak/mopidy,jcass77/mopidy,pacificIT/mopidy,adamcik/mopidy,bencevans/mopidy,ali/mopidy,bacontext/mopidy,swak/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,jcass77/mopidy,woutervanwijk/mopidy
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.STREAM_PROTOCOLS`
"""
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return '[ext.stream]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
stream: Add default config and config schema
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.stream]
# If the stream extension should be enabled or not
enabled = true
# Whitelist of URI schemas to support streaming from
protocols =
http
https
mms
rtmp
rtmps
rtsp
"""
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['protocols'] = config.List()
return schema
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
<commit_before>from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.STREAM_PROTOCOLS`
"""
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return '[ext.stream]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
<commit_msg>stream: Add default config and config schema<commit_after>
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.stream]
# If the stream extension should be enabled or not
enabled = true
# Whitelist of URI schemas to support streaming from
protocols =
http
https
mms
rtmp
rtmps
rtsp
"""
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['protocols'] = config.List()
return schema
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.STREAM_PROTOCOLS`
"""
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return '[ext.stream]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
stream: Add default config and config schemafrom __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.stream]
# If the stream extension should be enabled or not
enabled = true
# Whitelist of URI schemas to support streaming from
protocols =
http
https
mms
rtmp
rtmps
rtsp
"""
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['protocols'] = config.List()
return schema
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
<commit_before>from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.STREAM_PROTOCOLS`
"""
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return '[ext.stream]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
<commit_msg>stream: Add default config and config schema<commit_after>from __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.stream]
# If the stream extension should be enabled or not
enabled = true
# Whitelist of URI schemas to support streaming from
protocols =
http
https
mms
rtmp
rtmps
rtsp
"""
__doc__ = """A backend for playing music for streaming music.
This backend will handle streaming of URIs in
:attr:`mopidy.settings.STREAM_PROTOCOLS` assuming the right plugins are
installed.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Stream+backend
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-Stream'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['protocols'] = config.List()
return schema
def validate_environment(self):
pass
def get_backend_classes(self):
from .actor import StreamBackend
return [StreamBackend]
|
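A quick check of what the new default_config parses into can be done with the stdlib configparser. This is only a sketch under assumptions: Mopidy's own mopidy.utils.config module is internal here, config.List() is assumed to split the multi-line ini value the same way, and the protocol list is truncated for brevity.
import configparser
default_config = """
[ext.stream]
enabled = true
protocols =
    http
    https
    mms
"""
parser = configparser.ConfigParser()
parser.read_string(default_config)
# configparser folds the indented continuation lines into one
# newline-separated value, which a list-typed schema field then splits:
protocols = [line.strip()
             for line in parser.get('ext.stream', 'protocols').splitlines()
             if line.strip()]
assert protocols == ['http', 'https', 'mms']
assert parser.getboolean('ext.stream', 'enabled') is True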
ad35ec7d4adb91e79bd3382f0846e9fff2a417c7
|
osf/management/commands/update_preprint_share_dates.py
|
osf/management/commands/update_preprint_share_dates.py
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db.models import F
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
if dry_run:
logger.info('Would have sent ' + preprint._id + ' data to SHARE')
else:
on_preprint_updated(preprint._id)
logger.info(preprint._id + ' data sent to SHARE')
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to SHARE',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
update_share_preprint_modified_dates(dry_run)
|
Fix SHARE capitalization, use self-referential query
|
Fix SHARE capitalization, use self-referential query
|
Python
|
apache-2.0
|
sloria/osf.io,chrisseto/osf.io,mattclark/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,caseyrollins/osf.io,erinspace/osf.io,aaxelb/osf.io,sloria/osf.io,aaxelb/osf.io,binoculars/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,leb2dg/osf.io,adlius/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,laurenrevere/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,icereval/osf.io,saradbowman/osf.io,felliott/osf.io,baylee-d/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,crcresearch/osf.io,erinspace/osf.io,icereval/osf.io,baylee-d/osf.io,binoculars/osf.io,caseyrollins/osf.io,adlius/osf.io,binoculars/osf.io,crcresearch/osf.io,cslzchen/osf.io,felliott/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mfraezz/osf.io,TomBaxter/osf.io,aaxelb/osf.io,TomBaxter/osf.io,leb2dg/osf.io,pattisdr/osf.io,laurenrevere/osf.io,chrisseto/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,felliott/osf.io,mattclark/osf.io,leb2dg/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,TomBaxter/osf.io,cslzchen/osf.io,erinspace/osf.io,cslzchen/osf.io,chennan47/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,sloria/osf.io,felliott/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
Fix SHARE capitalization, use self-referential query
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db.models import F
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
if dry_run:
logger.info('Would have sent ' + preprint._id + ' data to SHARE')
else:
on_preprint_updated(preprint._id)
logger.info(preprint._id + ' data sent to SHARE')
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to SHARE',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
update_share_preprint_modified_dates(dry_run)
|
<commit_before>from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
<commit_msg>Fix SHARE capitalization, use self-referential query<commit_after>
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db.models import F
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
if dry_run:
logger.info('Would have sent ' + preprint._id + ' data to SHARE')
else:
on_preprint_updated(preprint._id)
logger.info(preprint._id + ' data sent to SHARE')
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to SHARE',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
update_share_preprint_modified_dates(dry_run)
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
Fix SHARE capitalization, use self-referential queryfrom __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db.models import F
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
if dry_run:
logger.info('Would have sent ' + preprint._id + ' data to SHARE')
else:
on_preprint_updated(preprint._id)
logger.info(preprint._id + ' data sent to SHARE')
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to SHARE',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
update_share_preprint_modified_dates(dry_run)
|
<commit_before>from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
<commit_msg>Fix SHARE capitalization, use self-referential query<commit_after>from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db.models import F
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
if dry_run:
logger.info('Would have sent ' + preprint._id + ' data to SHARE')
else:
on_preprint_updated(preprint._id)
logger.info(preprint._id + ' data sent to SHARE')
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to SHARE',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
update_share_preprint_modified_dates(dry_run)
|
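The change above replaces a Python-side loop over every preprint with a single database-side comparison via F(). Below is a hedged sketch of the pattern with toy models (not the real osf schema, and Django needs an app registry for model classes to load, so this is illustrative rather than standalone-runnable):
from django.db import models
from django.db.models import F
class Node(models.Model):
    date_modified = models.DateTimeField()
class Preprint(models.Model):   # toy stand-in for PreprintService
    node = models.ForeignKey(Node, on_delete=models.CASCADE)
    date_modified = models.DateTimeField()
# F('node__date_modified') references the joined column, so the filter is
# evaluated in SQL, roughly:
#   SELECT ... FROM preprint JOIN node ON ...
#   WHERE preprint.date_modified < node.date_modified
# Only stale rows are fetched, instead of loading every preprint and
# comparing dates in Python as the old loop did.
stale = Preprint.objects.filter(date_modified__lt=F('node__date_modified'))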
46cfd25a4acf075650a5471c388457cb04cd9a15
|
invenio_mail/api.py
|
invenio_mail/api.py
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx={},
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx=None,
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
ctx = ctx if ctx else {}
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
Use sentinel value for ctx
|
Use sentinel value for ctx
|
Python
|
mit
|
inveniosoftware/invenio-mail,inveniosoftware/invenio-mail,inveniosoftware/invenio-mail
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx={},
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
Use sentinel value for ctx
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx=None,
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
ctx = ctx if ctx else {}
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx={},
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
<commit_msg>Use sentinel value for ctx<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx=None,
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
ctx = ctx if ctx else {}
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx={},
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
Use sentinel value for ctx# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx=None,
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
ctx = ctx if ctx else {}
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx={},
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
<commit_msg>Use sentinel value for ctx<commit_after># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Template based messages."""
from __future__ import absolute_import, print_function
from flask import render_template
from flask_mail import Message
class TemplatedMessage(Message):
"""Siplify creation of templated messages."""
def __init__(self, template_body=None, template_html=None, ctx=None,
**kwargs):
r"""Build message body and HTML based on provided templates.
Provided templates can use keyword arguments ``body`` and ``html``
respectively.
:param template_body: Path to the text template.
:param template_html: Path to the html template.
:param ctx: A mapping containing additional information passed to the
template.
:param \*\*kwargs: Keyword arguments as defined in
:class:`flask_mail.Message`.
"""
ctx = ctx if ctx else {}
if template_body:
kwargs['body'] = render_template(
template_body, body=kwargs.get('body'), **ctx
)
if template_html:
kwargs['html'] = render_template(
template_html, html=kwargs.get('html'), **ctx
)
super(TemplatedMessage, self).__init__(**kwargs)
|
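The motivation for the sentinel above is Python's mutable-default pitfall: a default such as ctx={} is evaluated once, when the function is defined, and the same dict object is then shared across every call. A minimal self-contained demonstration:
def bad(ctx={}):                  # one dict, created at definition time
    ctx['calls'] = ctx.get('calls', 0) + 1
    return ctx
def good(ctx=None):               # sentinel: build a fresh dict per call
    ctx = ctx if ctx else {}
    ctx['calls'] = ctx.get('calls', 0) + 1
    return ctx
assert bad() == {'calls': 1}
assert bad() == {'calls': 2}      # state leaked between calls
assert good() == {'calls': 1}
assert good() == {'calls': 1}     # fresh dict each time
One design note: ``ctx = ctx if ctx else {}`` also swaps out a caller-supplied empty dict, whereas ``ctx if ctx is not None else {}`` would preserve the caller's object; for a read-only template context the difference is harmless.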
b11ac934c95e4bbaee46ae2b73c3e7129acc06f3
|
salt/modules/key.py
|
salt/modules/key.py
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
|
Use hash_type param for pem_finger
|
Use hash_type param for pem_finger
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
Use hash_type param for pem_finger
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
|
<commit_before># -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
<commit_msg>Use hash_type param for pem_finger<commit_after>
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
|
# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
Use hash_type param for pem_finger# -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
|
<commit_before># -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
<commit_msg>Use hash_type param for pem_finger<commit_after># -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
# MD5 here is temporary. Change to SHA256 when retired.
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
|
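Two things change above: dict.get() lets a config without hash_type fall back to 'md5' instead of raising KeyError, and the fingerprint itself is just a digest of the PEM file rendered in pairs. salt.utils.pem_finger is internal to Salt, so the function below is only a toy stand-in for the idea, not Salt's implementation:
import hashlib
opts = {'pki_dir': '/etc/salt/pki/minion'}     # note: no 'hash_type' key
assert opts.get('hash_type', 'md5') == 'md5'   # opts['hash_type'] would raise
def finger_sketch(pem_bytes, sum_type='md5'):
    # hash the key material and render colon-separated hex pairs,
    # e.g. 'ab:cd:ef:...' (illustrative only)
    digest = hashlib.new(sum_type, pem_bytes).hexdigest()
    return ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))
print(finger_sketch(b'fake pem bytes', 'sha256'))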
590494bf9d840cb6353260392b94700656db5d47
|
fabfile/__init__.py
|
fabfile/__init__.py
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
Fix super dumb mistake causing all test runs to hit tests folder.
|
Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!
|
Python
|
bsd-2-clause
|
cgvarela/fabric,raimon49/fabric,elijah513/fabric,StackStorm/fabric,amaniak/fabric,kmonsoor/fabric,mathiasertl/fabric,opavader/fabric,bspink/fabric,fernandezcuesta/fabric,pgroudas/fabric,likesxuqiang/fabric,xLegoz/fabric,tekapo/fabric,jaraco/fabric,bitmonk/fabric,sdelements/fabric,kxxoling/fabric,itoed/fabric,SamuelMarks/fabric,qinrong/fabric,TarasRudnyk/fabric,cmattoon/fabric,haridsv/fabric,rodrigc/fabric,ploxiln/fabric,askulkarni2/fabric,rane-hs/fabric-py3,rbramwell/fabric,tolbkni/fabric
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
<commit_before>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
<commit_msg>Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!<commit_after>
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
<commit_before>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
<commit_msg>Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!<commit_after>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
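The bug fixed above was that 'tests' stayed hard-coded in default_args, so a caller passing its own target still ran the unit suite on top of it. The assembly logic, isolated as a plain function so the fix can be checked without nose:
def build_args(args=None):
    # mirrors the fixed logic: target 'tests' only when nothing overrides it
    tests = "" if args else " tests"
    default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
    default_args += (" " + args) if args else ""
    return default_args.split()    # split() collapses the doubled spaces
assert build_args() == ['-sv', '--with-doctest', '--nologcapture',
                        '--with-color', 'tests']
assert 'tests' not in build_args('integration')   # the override now wins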
efbcd8104470234e50ad2e40719b0edf1fbc45c4
|
zou/app/utils/date_helpers.py
|
zou/app/utils/date_helpers.py
|
from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
|
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
Add helper to handle timezone in date strings
|
[utils] Add helper to handle timezone in date strings
|
Python
|
agpl-3.0
|
cgwire/zou
|
from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
[utils] Add helper to handle timezone in date strings
|
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
<commit_before>from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
<commit_msg>[utils] Add helper to handle timezone in date strings<commit_after>
|
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
[utils] Add helper to handle timezone in date stringsfrom babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
<commit_before>from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
<commit_msg>[utils] Add helper to handle timezone in date strings<commit_after>from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
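For reference, a stdlib-only sketch of the same round trip using zoneinfo (Python 3.9+). It mirrors babel's convention of treating a naive datetime as UTC and converting it into the requested zone; the zone name and sample date are assumptions for illustration:
from datetime import datetime, timezone
from zoneinfo import ZoneInfo   # Python 3.9+
def date_string_with_timezone(date_string, tz_name):
    # parse the naive ISO timestamp, assume UTC (as babel does for naive
    # datetimes), convert into the target zone, and render it back out
    naive = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
    aware = naive.replace(tzinfo=timezone.utc).astimezone(ZoneInfo(tz_name))
    return aware.isoformat()
print(date_string_with_timezone("2021-06-01T12:00:00", "Europe/Paris"))
# -> 2021-06-01T14:00:00+02:00
One hedged caveat on the pattern in the commit: babel uses LDML patterns, in which YYYY is the week-based year, DD the day of year, and literal letters such as the T are normally quoted, so "yyyy-MM-dd'T'HH:mm:ss" is the spelling that corresponds to the strptime format above.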
2f8206d5d2ef699b368d4e2b0c87f1f9d5b0dd64
|
setup_extensions.py
|
setup_extensions.py
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"extern/glew-1.11.0/lib/Release/x64/glew32",
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"GLEW",
"SDL2",
"SDL2main",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
Use the generic names for these as well.
|
Use the generic names for these as well.
|
Python
|
bsd-3-clause
|
Pink-Silver/PyDoom,Pink-Silver/PyDoom
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"extern/glew-1.11.0/lib/Release/x64/glew32",
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
Use the generic names for these as well.
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"GLEW",
"SDL2",
"SDL2main",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
<commit_before>#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"extern/glew-1.11.0/lib/Release/x64/glew32",
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
<commit_msg>Use the generic names for these as well.<commit_after>
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"GLEW",
"SDL2",
"SDL2main",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"extern/glew-1.11.0/lib/Release/x64/glew32",
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
Use the generic names for these as well.
#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"GLEW",
"SDL2",
"SDL2main",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
<commit_before>#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"extern/glew-1.11.0/lib/Release/x64/glew32",
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
<commit_msg>Use the generic names for these as well.<commit_after>#!python3
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup (
name = 'PyDoom rendering module',
ext_modules = cythonize (
[
Extension (
"pydoom.extensions.video",
["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
include_dirs = [
"extern/glew-1.11.0/include",
"extern/SDL2-2.0.3/include",
],
libraries = [
"GL",
"GLEW",
"SDL2",
"SDL2main",
]
),
Extension (
"pydoom.extensions.utility",
["pydoom/extensions/utility.pyx", "pydoom/extensions/cutility.c"],
include_dirs = [
"extern/SDL2-2.0.3/include"
],
libraries = [
"extern/SDL2-2.0.3/lib/x64/SDL2",
"extern/SDL2-2.0.3/lib/x64/SDL2main",
"extern/SDL2-2.0.3/lib/x64/SDL2test",
]
)
]
)
)
|
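A note on the PyDoom record above: the switch to generic names (GL, GLEW, SDL2, SDL2main) relies on the linker finding those libraries on its search path. A minimal sketch of how that is usually paired with library_dirs; the directory values here are illustrative assumptions, not something the commit adds:

# Sketch: generic `libraries` entries become linker flags (-lGLEW, -lSDL2),
# resolved against `library_dirs`, instead of hard-coded paths to .lib files.
from distutils.extension import Extension

video = Extension(
    "pydoom.extensions.video",
    ["pydoom/extensions/video.pyx", "pydoom/extensions/cvideo.c"],
    include_dirs=["extern/glew-1.11.0/include", "extern/SDL2-2.0.3/include"],
    library_dirs=[  # assumed locations, mirroring the old hard-coded paths
        "extern/glew-1.11.0/lib/Release/x64",
        "extern/SDL2-2.0.3/lib/x64",
    ],
    libraries=["GL", "GLEW", "SDL2", "SDL2main"],
)
print(video.libraries)  # ['GL', 'GLEW', 'SDL2', 'SDL2main']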
2df850a4fbe1d063134e53f93623d81da8fd3cda
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.17.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.17.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.17.2
|
Increment version number to 0.17.2
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.17.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.17.2
|
"""Configuration for Django system."""
__version__ = "0.17.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.17.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.17.2<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.17.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.17.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.17.2
"""Configuration for Django system."""
__version__ = "0.17.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.17.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.17.2<commit_after>"""Configuration for Django system."""
__version__ = "0.17.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
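The version-tuple idiom in the record above is easy to sanity-check in isolation: the first "-" is turned into a "." so a pre-release tag becomes its own tuple element, and purely numeric parts become ints. A standalone sketch:

def version_info(version):
    # Mirrors the comprehension in the record above.
    return tuple(
        int(num) if num.isdigit() else num
        for num in version.replace("-", ".", 1).split(".")
    )

print(version_info("0.17.2"))       # (0, 17, 2)
print(version_info("0.17.2-beta"))  # (0, 17, 2, 'beta')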
336cdd2619df5fe60a3b0a8a8a91b34b7c1b2ee4
|
grokapi/queries.py
|
grokapi/queries.py
|
# -*- coding: utf-8 -*-
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
base_url = "http://stats.grok.se/json/"
return base_url + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
# -*- coding: utf-8 -*-
BASE_URL = "http://stats.grok.se/json/"
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
Make base_url a global variable
|
Make base_url a global variable
|
Python
|
mit
|
Commonists/Grokapi
|
# -*- coding: utf-8 -*-
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
base_url = "http://stats.grok.se/json/"
return base_url + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
Make base_url a global variable
|
# -*- coding: utf-8 -*-
BASE_URL = "http://stats.grok.se/json/"
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
<commit_before># -*- coding: utf-8 -*-
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
base_url = "http://stats.grok.se/json/"
return base_url + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
<commit_msg>Make base_url a global variable<commit_after>
|
# -*- coding: utf-8 -*-
BASE_URL = "http://stats.grok.se/json/"
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
# -*- coding: utf-8 -*-
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
base_url = "http://stats.grok.se/json/"
return base_url + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
Make base_url a global variable
# -*- coding: utf-8 -*-
BASE_URL = "http://stats.grok.se/json/"
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
<commit_before># -*- coding: utf-8 -*-
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
base_url = "http://stats.grok.se/json/"
return base_url + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
<commit_msg>Make base_url a global variable<commit_after># -*- coding: utf-8 -*-
BASE_URL = "http://stats.grok.se/json/"
class Grok(object):
"""stats.grok.se article statistics."""
def __init__(self, title, site):
self.site = site
self.title = title
def _make_url(self, year, month):
"""Make the URL to the JSON output of stats.grok.se service."""
return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(self.site, year, month, self.title)
|
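For reference, the URL builder in the grokapi record produces month-stamped paths; the zero-padded {2:02d} field keeps single-digit months two characters wide. A standalone sketch using the same format string (the example arguments are invented):

BASE_URL = "http://stats.grok.se/json/"

def make_url(site, title, year, month):
    # Same format string as Grok._make_url above.
    return BASE_URL + "{0:s}/{1:d}{2:02d}/{3:s}".format(site, year, month, title)

print(make_url("en", "Python", 2014, 7))
# http://stats.grok.se/json/en/201407/Python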
538f4b2d0e030a9256ecd68eaf0a1a2e5d649f49
|
haas/tests/mocks.py
|
haas/tests/mocks.py
|
import itertools
import traceback
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter(itertools.repeat(ret))
def utcnow(self):
return next(self.ret)
|
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter((ret,))
def utcnow(self):
try:
return next(self.ret)
except StopIteration:
raise ValueError('No more mock values!')
|
Raise error in mock if there are not enough mock datetime values
|
Raise error in mock if there are not enough mock datetime values
|
Python
|
bsd-3-clause
|
sjagoe/haas,itziakos/haas,itziakos/haas,scalative/haas,sjagoe/haas,scalative/haas
|
import itertools
import traceback
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter(itertools.repeat(ret))
def utcnow(self):
return next(self.ret)
Raise error in mock if there are not enough mock datetime values
|
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter((ret,))
def utcnow(self):
try:
return next(self.ret)
except StopIteration:
raise ValueError('No more mock values!')
|
<commit_before>import itertools
import traceback
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter(itertools.repeat(ret))
def utcnow(self):
return next(self.ret)
<commit_msg>Raise error in mock if there are not enough mock datetime values<commit_after>
|
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter((ret,))
def utcnow(self):
try:
return next(self.ret)
except StopIteration:
raise ValueError('No more mock values!')
|
import itertools
import traceback
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter(itertools.repeat(ret))
def utcnow(self):
return next(self.ret)
Raise error in mock if there are not enough mock datetime values
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter((ret,))
def utcnow(self):
try:
return next(self.ret)
except StopIteration:
raise ValueError('No more mock values!')
|
<commit_before>import itertools
import traceback
class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter(itertools.repeat(ret))
def utcnow(self):
return next(self.ret)
<commit_msg>Raise error in mock if there are not enough mock datetime values<commit_after>class MockDateTime(object):
def __init__(self, ret):
try:
self.ret = iter(ret)
except TypeError:
self.ret = iter((ret,))
def utcnow(self):
try:
return next(self.ret)
except StopIteration:
raise ValueError('No more mock values!')
|
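Hypothetical usage of the MockDateTime above: a test hands it a fixed sequence of timestamps, and code under test that calls utcnow() more often than expected now fails loudly instead of silently reusing a repeated value.

from datetime import datetime

class MockDateTime(object):  # copied from the record above
    def __init__(self, ret):
        try:
            self.ret = iter(ret)
        except TypeError:
            self.ret = iter((ret,))
    def utcnow(self):
        try:
            return next(self.ret)
        except StopIteration:
            raise ValueError('No more mock values!')

mock = MockDateTime([datetime(2015, 1, 1), datetime(2015, 1, 2)])
print(mock.utcnow())  # 2015-01-01 00:00:00
print(mock.utcnow())  # 2015-01-02 00:00:00
try:
    mock.utcnow()     # third call exhausts the two supplied values
except ValueError as exc:
    print(exc)        # No more mock values!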
2141e4fd2b09d3a8a95e032fb02eafb9e6f818c9
|
i3pystatus/shell.py
|
i3pystatus/shell.py
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
Add exception handling for output
|
Add exception handling for output
|
Python
|
mit
|
opatut/i3pystatus,teto/i3pystatus,schroeji/i3pystatus,ncoop/i3pystatus,juliushaertl/i3pystatus,m45t3r/i3pystatus,richese/i3pystatus,claria/i3pystatus,ncoop/i3pystatus,paulollivier/i3pystatus,paulollivier/i3pystatus,ismaelpuerto/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,fmarchenko/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,yang-ling/i3pystatus,yang-ling/i3pystatus,MaicoTimmerman/i3pystatus,richese/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,schroeji/i3pystatus,juliushaertl/i3pystatus,drwahl/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,drwahl/i3pystatus,facetoe/i3pystatus,Arvedui/i3pystatus,m45t3r/i3pystatus,Arvedui/i3pystatus,Elder-of-Ozone/i3pystatus,fmarchenko/i3pystatus,enkore/i3pystatus,opatut/i3pystatus,facetoe/i3pystatus,claria/i3pystatus,Elder-of-Ozone/i3pystatus,enkore/i3pystatus,MaicoTimmerman/i3pystatus,ismaelpuerto/i3pystatus,teto/i3pystatus
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
Add exception handling for output
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
<commit_before>from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
<commit_msg>Add exception handling for output<commit_after>
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
Add exception handling for output
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
<commit_before>from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
<commit_msg>Add exception handling for output<commit_after>from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
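The try/except added in the i3pystatus record guards the out[-1] check: when the command prints nothing, out is an empty string and indexing it raises IndexError, which the bare except absorbs. A small illustration, plus an rstrip() variant that avoids indexing altogether (an alternative, not what the commit does):

out = ""  # e.g. a command that produced no output
try:
    out[-1]
except IndexError as exc:
    print(exc)  # string index out of range

# Equivalent trailing-whitespace cleanup without any indexing:
print(repr("hello\n".replace("\n", " ").rstrip()))  # 'hello'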
2477822fa7589e4968465b56e77885378d30bbc5
|
first/polls/admin.py
|
first/polls/admin.py
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Choice)
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
admin.site.register(Question, QuestionAdmin)
|
Make Choice object editable on Question Admin page
|
Make Choice object editable on Question Admin page
|
Python
|
mit
|
ugaliguy/Django-Tutorial-Projects,ugaliguy/Django-Tutorial-Projects
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Choice)
Make Choice object editable on Question Admin page
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
admin.site.register(Question, QuestionAdmin)
|
<commit_before>from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Choice)<commit_msg>Make Choice object editable on Question Admin page<commit_after>
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
admin.site.register(Question, QuestionAdmin)
|
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Choice)
Make Choice object editable on Question Admin page
from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
admin.site.register(Question, QuestionAdmin)
|
<commit_before>from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Choice)<commit_msg>Make Choice object editable on Question Admin page<commit_after>from django.contrib import admin
from .models import Choice, Question
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
admin.site.register(Question, QuestionAdmin)
|
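The ChoiceInline above uses StackedInline, which renders each extra Choice as a full stacked fieldset. A common variation (not part of the commit) is TabularInline, which packs the same forms into one compact table row each; the rest of the admin.py is unchanged:

from django.contrib import admin
from .models import Choice, Question

class ChoiceInline(admin.TabularInline):  # table layout instead of stacked
    model = Choice
    extra = 3

class QuestionAdmin(admin.ModelAdmin):
    fieldsets = [
        (None, {'fields': ['question_text']}),
        ('Date information', {'fields': ['pub_date']}),
    ]
    inlines = [ChoiceInline]

admin.site.register(Question, QuestionAdmin)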
b32b047656abd28dd794ee16dfab682337a753b1
|
accounts/tests.py
|
accounts/tests.py
|
from django.test import TestCase
# Create your tests here.
|
"""accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
|
Add first unit test for welcome page
|
Add first unit test for welcome page
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
from django.test import TestCase
# Create your tests here.
Add first unit test for welcome page
|
"""accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add first unit test for welcome page<commit_after>
|
"""accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
|
from django.test import TestCase
# Create your tests here.
Add first unit test for welcome page
"""accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add first unit test for welcome page<commit_after>"""accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
|
bd4643e35a9c75d15bb6a4bfef63774fdd8bee5b
|
test/regress/cbrt.cpp.py
|
test/regress/cbrt.cpp.py
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
Add a typical 2-component case. Comment out a case that fail until integer support is fixed.
|
Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075
|
Python
|
lgpl-2.1
|
libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
<commit_before>#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
<commit_msg>Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075<commit_after>
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075
#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
<commit_before>#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
<commit_msg>Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075<commit_after>#!/usr/bin/python
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
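A plausible reason the integer case above was commented out: math.pow(x, 1/3.0) is a float operation, so even perfect cubes rarely come back as exact integers, and an exact comparison against 3 fails. This is an inference from the float exponent, not something the commit message states; a quick check:

import math

c = math.pow(27, 1/3.0)
print(c)                   # close to, but usually not exactly, 3.0
print(c == 3)              # typically False, breaking exact integer tests
print(abs(c - 3) < 1e-9)   # a tolerance-based comparison passes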
0b0b25da0b43166dfca4a95930bed3409dfb7ba1
|
tests/test_parse.py
|
tests/test_parse.py
|
from . import TestCase
import bitmath
class TestBasicMath(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
from . import TestCase
import bitmath
class TestParse(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
Fix oops with test case copy/pasta.
|
Fix oops with test case copy/pasta.
|
Python
|
mit
|
tbielawa/bitmath,pombredanne/bitmath,pombredanne/bitmath,tbielawa/bitmath
|
from . import TestCase
import bitmath
class TestBasicMath(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
Fix oops with test case copy/pasta.
|
from . import TestCase
import bitmath
class TestParse(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
<commit_before>from . import TestCase
import bitmath
class TestBasicMath(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
<commit_msg>Fix oops with test case copy/pasta.<commit_after>
|
from . import TestCase
import bitmath
class TestParse(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
from . import TestCase
import bitmath
class TestBasicMath(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
Fix oops with test case copy/pasta.
from . import TestCase
import bitmath
class TestParse(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
<commit_before>from . import TestCase
import bitmath
class TestBasicMath(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
<commit_msg>Fix oops with test case copy/pasta.<commit_after>from . import TestCase
import bitmath
class TestParse(TestCase):
def test_b(self):
self.assertEqual(
bitmath.parse_string("123b"),
bitmath.Bit(123))
def test_B(self):
self.assertEqual(
bitmath.parse_string("321B"),
bitmath.Byte(321))
def test_Gb(self):
self.assertEqual(
bitmath.parse_string("456Gb"),
bitmath.Gb(456))
def test_MiB(self):
self.assertEqual(
bitmath.parse_string("654 MiB"),
bitmath.MiB(654))
def test_bad_float(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23.45 kb")
def test_bad_unit(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 GIB")
def test_bad_unit2(self):
with self.assertRaises(ValueError):
bitmath.parse_string("1.23 QB")
|
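Mirroring the renamed TestParse assertions outside the test harness gives a quick feel for parse_string's contract; everything below is inferred directly from the tests above, nothing beyond them:

import bitmath

print(bitmath.parse_string("654 MiB") == bitmath.MiB(654))  # True
print(bitmath.parse_string("456Gb") == bitmath.Gb(456))     # True
try:
    bitmath.parse_string("1.23 QB")  # unknown unit, per test_bad_unit2
except ValueError as exc:
    print("rejected:", exc)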
dc622e41059c75da619f90423e35c35d8a3730d4
|
tests/test_qccfg.py
|
tests/test_qccfg.py
|
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
import pkg_resources
import json
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_cfg_json():
""" All config files should comply with json format
In the future, when move load cfg outside, refactor here.
"""
cfgfiles = [f for f in
pkg_resources.resource_listdir('cotede', 'qc_cfg')
if f[-5:] == ".json"]
for cfgfile in cfgfiles:
cfg = json.loads(pkg_resources.resource_string('cotede',
"qc_cfg/%s" % cfgfile))
assert type(cfg) is dict
for k in cfg.keys():
assert len(cfg[k]) > 0
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
Test if all QC cfg are proper json files.
|
Test if all QC cfg are proper json files.
|
Python
|
bsd-3-clause
|
castelao/CoTeDe
|
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
Test if all QC cfg are proper json files.
|
import pkg_resources
import json
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_cfg_json():
""" All config files should comply with json format
In the future, when move load cfg outside, refactor here.
"""
cfgfiles = [f for f in
pkg_resources.resource_listdir('cotede', 'qc_cfg')
if f[-5:] == ".json"]
for cfgfile in cfgfiles:
cfg = json.loads(pkg_resources.resource_string('cotede',
"qc_cfg/%s" % cfgfile))
assert type(cfg) is dict
for k in cfg.keys():
assert len(cfg[k]) > 0
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
<commit_before>import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
<commit_msg>Test if all QC cfg are proper json files.<commit_after>
|
import pkg_resources
import json
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_cfg_json():
""" All config files should comply with json format
In the future, when move load cfg outside, refactor here.
"""
cfgfiles = [f for f in
pkg_resources.resource_listdir('cotede', 'qc_cfg')
if f[-5:] == ".json"]
for cfgfile in cfgfiles:
cfg = json.loads(pkg_resources.resource_string('cotede',
"qc_cfg/%s" % cfgfile))
assert type(cfg) is dict
for k in cfg.keys():
assert len(cfg[k]) > 0
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
Test if all QC cfg are proper json files.
import pkg_resources
import json
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_cfg_json():
""" All config files should comply with json format
In the future, when move load cfg outside, refactor here.
"""
cfgfiles = [f for f in
pkg_resources.resource_listdir('cotede', 'qc_cfg')
if f[-5:] == ".json"]
for cfgfile in cfgfiles:
cfg = json.loads(pkg_resources.resource_string('cotede',
"qc_cfg/%s" % cfgfile))
assert type(cfg) is dict
for k in cfg.keys():
assert len(cfg[k]) > 0
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
<commit_before>import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
<commit_msg>Test if all QC cfg are proper json files.<commit_after>
import pkg_resources
import json
import numpy as np
from seabird import cnv
import cotede.qc
from cotede.utils.supportdata import download_testdata
def test_cfg_json():
""" All config files should comply with json format
In the future, when move load cfg outside, refactor here.
"""
cfgfiles = [f for f in
pkg_resources.resource_listdir('cotede', 'qc_cfg')
if f[-5:] == ".json"]
for cfgfile in cfgfiles:
cfg = json.loads(pkg_resources.resource_string('cotede',
"qc_cfg/%s" % cfgfile))
assert type(cfg) is dict
for k in cfg.keys():
assert len(cfg[k]) > 0
def test_multiple_cfg():
""" I should think about a way to test if the output make sense.
"""
datafile = download_testdata("dPIRX010.cnv")
data = cnv.fCNV(datafile)
pqc = cotede.qc.ProfileQC(data)
pqc = cotede.qc.ProfileQC(data, cfg='cotede')
pqc = cotede.qc.ProfileQC(data, cfg='gtspp')
pqc = cotede.qc.ProfileQC(data, cfg='eurogoos')
# Manually defined
pqc = cotede.qc.ProfileQC(data, cfg={'TEMP': {"spike": 6.0,}})
assert len(pqc.flags) > 0
|
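The test_cfg_json loop above ties validation to pkg_resources and an installed cotede; the same check written against a plain directory of JSON files looks like this (the commented path is an assumed location, matching the package data dir named in the test):

import json
import os

def validate_json_dir(path):
    # Every *.json file must parse to a dict whose values are non-empty,
    # matching the assertions in test_cfg_json above.
    for name in os.listdir(path):
        if not name.endswith(".json"):
            continue
        with open(os.path.join(path, name)) as f:
            cfg = json.load(f)
        assert isinstance(cfg, dict), name
        for key, value in cfg.items():
            assert len(value) > 0, (name, key)

# validate_json_dir("cotede/qc_cfg")  # assumed on-disk location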
8eb66d72452d69d683a576c75cdf2be72b2370fa
|
tests/test_utils.py
|
tests/test_utils.py
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_formate(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_format(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
def test_increment_page_number_good_format(sample_page_good_format):
chapter = sample_page_good_format["chapter"]
page = sample_page_good_format["page"]
current_page = utils.build_img_path(chapter, page)
next_page = utils.increment_page_number(current_page)
expected_output = '/manga_ch1/x_v001-002'
assert next_page == expected_output
|
Add test for inc page num, good format
|
Add test for inc page num, good format
|
Python
|
mit
|
ma3lstrom/manga-cork,ma3lstrom/manga-cork,ma3lstrom/manga-cork
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_formate(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
Add test for inc page num, good format
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_format(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
def test_increment_page_number_good_format(sample_page_good_format):
chapter = sample_page_good_format["chapter"]
page = sample_page_good_format["page"]
current_page = utils.build_img_path(chapter, page)
next_page = utils.increment_page_number(current_page)
expected_output = '/manga_ch1/x_v001-002'
assert next_page == expected_output
|
<commit_before>import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_formate(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
<commit_msg>Add test for inc page num, good format<commit_after>
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_format(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
def test_increment_page_number_good_format(sample_page_good_format):
chapter = sample_page_good_format["chapter"]
page = sample_page_good_format["page"]
current_page = utils.build_img_path(chapter, page)
next_page = utils.increment_page_number(current_page)
expected_output = '/manga_ch1/x_v001-002'
assert next_page == expected_output
|
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_formate(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
Add test for inc page num, good format
import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_format(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
def test_increment_page_number_good_format(sample_page_good_format):
chapter = sample_page_good_format["chapter"]
page = sample_page_good_format["page"]
current_page = utils.build_img_path(chapter, page)
next_page = utils.increment_page_number(current_page)
expected_output = '/manga_ch1/x_v001-002'
assert next_page == expected_output
|
<commit_before>import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_formate(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
<commit_msg>Add test for inc page num, good format<commit_after>import pytest
from mangacork import utils
@pytest.fixture
def sample_page_bad_format():
sample_page = {'chapter': "chapter1", 'page': 3}
return sample_page
@pytest.fixture
def sample_page_good_format():
sample_page = {'chapter':'manga_ch1', 'page':'x_v001-001'}
return sample_page
def test_build_img_path(sample_page_bad_format):
chapter = sample_page_bad_format["chapter"]
page = sample_page_bad_format["page"]
expected_output = "/chapter1/3"
assert utils.build_img_path(chapter,page) == expected_output
def test_increment_page_number_bad_format(sample_page_bad_format):
with pytest.raises(ValueError):
current_page = utils.build_img_path(sample_page_bad_format["chapter"],
sample_page_bad_format["page"])
utils.increment_page_number(current_page)
def test_increment_page_number_good_format(sample_page_good_format):
chapter = sample_page_good_format["chapter"]
page = sample_page_good_format["page"]
current_page = utils.build_img_path(chapter, page)
next_page = utils.increment_page_number(current_page)
expected_output = '/manga_ch1/x_v001-002'
assert next_page == expected_output
|
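The mangacork utils module these tests exercise is not included in this record. As a minimal sketch of what the fixtures pin down (build_img_path joining chapter and page into a URL-style path; increment_page_number bumping a zero-padded, hyphen-separated page suffix and raising ValueError when none is present), the following would satisfy both tests; the regex and padding logic are assumptions inferred from the expected outputs, not the project's actual code:

import re

def build_img_path(chapter, page):
    # Join chapter and page into a URL-style path, e.g. "/manga_ch1/x_v001-001".
    return "/{}/{}".format(chapter, page)

def increment_page_number(current_page):
    # Require a hyphen-separated, zero-padded page suffix such as "-001";
    # "/chapter1/3" has none, so ValueError is raised (the bad-format case).
    match = re.search(r"-(\d+)$", current_page)
    if match is None:
        raise ValueError("no trailing page number in {}".format(current_page))
    number = match.group(1)
    bumped = str(int(number) + 1).zfill(len(number))
    return current_page[:match.start(1)] + bumped

Under this sketch, build_img_path('manga_ch1', 'x_v001-001') gives '/manga_ch1/x_v001-001', and increment_page_number on that path gives '/manga_ch1/x_v001-002', matching the good-format expectation.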
2ab601492a76be5d32a2e1d5009c150269e5fb03
|
src/interviews/managers.py
|
src/interviews/managers.py
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs)
return super(InterviewManager, self).none()
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs).order_by('slug')
return super(InterviewManager, self).none()
|
Order `most_read` queryset by slug.
|
Order `most_read` queryset by slug.
|
Python
|
mit
|
vermpy/thespotlight,vermpy/thespotlight,vermpy/thespotlight
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs)
return super(InterviewManager, self).none()
Order `most_read` queryset by slug.
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs).order_by('slug')
return super(InterviewManager, self).none()
|
<commit_before>import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs)
return super(InterviewManager, self).none()
<commit_msg>Order `most_read` queryset by slug.<commit_after>
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs).order_by('slug')
return super(InterviewManager, self).none()
|
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs)
return super(InterviewManager, self).none()
Order `most_read` queryset by slug.
import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs).order_by('slug')
return super(InterviewManager, self).none()
|
<commit_before>import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs)
return super(InterviewManager, self).none()
<commit_msg>Order `most_read` queryset by slug.<commit_after>import logging
from datetime import timedelta
from django.db import models
from django.utils import timezone
from .google_analytics import get_most_read_pages
logger = logging.getLogger(__name__)
class InterviewManager(models.Manager):
def active(self, *args, **kwargs):
return super(InterviewManager, self).filter(draft=False).filter(publish__lte=timezone.now())
def newest(self, *args, **kwargs):
return self.active().first()
def last_week(self, *args, **kwargs):
some_day_last_week = timezone.now().date() - timedelta(days=7)
monday_of_last_week = some_day_last_week - timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + timedelta(days=7)
return super(InterviewManager, self).filter(draft=False).filter(publish__gte=monday_of_last_week,
publish__lt=monday_of_this_week)[:1]
def most_read(self, *args, **kwargs):
slugs = get_most_read_pages()
if slugs:
return self.active().filter(slug__in=slugs).order_by('slug')
return super(InterviewManager, self).none()
|
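Without an explicit order_by, the row order of a filtered Django queryset is backend-dependent, so most_read could shuffle between runs; pinning order_by('slug') makes the result deterministic. A rough regression-test sketch follows; the Interview model name, its minimal fields, the manager being attached as objects, and the mock target are all assumptions for illustration:

from unittest import mock

from django.test import TestCase
from django.utils import timezone

from interviews.models import Interview

class MostReadOrderingTest(TestCase):
    def test_most_read_is_ordered_by_slug(self):
        for slug in ('zeta', 'alpha', 'mid'):
            # Assumed minimal fields; the real model almost certainly has more.
            Interview.objects.create(slug=slug, draft=False, publish=timezone.now())
        # Patch the name imported into interviews.managers so most_read sees our slugs.
        with mock.patch('interviews.managers.get_most_read_pages',
                        return_value=['zeta', 'alpha', 'mid']):
            slugs = [i.slug for i in Interview.objects.most_read()]
        self.assertEqual(slugs, ['alpha', 'mid', 'zeta'])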
e42d20547add5b92df8c8ce56bb2340b7b63ced9
|
timpani/settings.py
|
timpani/settings.py
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
elif name == "display_name":
return value == "full_name" or value == "username"
elif name == "theme":
return value in getAvailableThemes()
|
Add validation display_name and theme
|
Add validation display_name and theme
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
Add validation display_name and theme
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
elif name == "display_name":
return value == "full_name" or value == "username"
elif name == "theme":
return value in getAvailableThemes()
|
<commit_before>from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
<commit_msg>Add validation display_name and theme<commit_after>
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
elif name == "display_name":
return value == "full_name" or value == "username"
elif name == "theme":
return value in getAvailableThemes()
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
Add validation display_name and theme
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
elif name == "display_name":
return value == "full_name" or value == "username"
elif name == "theme":
return value in getAvailableThemes()
|
<commit_before>from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
<commit_msg>Add validation display_name and theme<commit_after>from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
elif name == "display_name":
return value == "full_name" or value == "username"
elif name == "theme":
return value in getAvailableThemes()
|
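Note that the new branches make validateSetting return None (falsy) for any unrecognized setting name, so setSettingValue silently rejects unknown settings, and that getAvailableThemes() is neither defined nor imported in the snippet shown. A plausible sketch of that helper; the themes-directory layout is an assumption, not timpani's actual structure:

import os

def getAvailableThemes(themesPath='themes'):
    # Treat each subdirectory of the themes folder as an installable theme name.
    if not os.path.isdir(themesPath):
        return []
    return [entry for entry in os.listdir(themesPath)
            if os.path.isdir(os.path.join(themesPath, entry))]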
753545cd10aa455ec0912843820e26d5c4903c8e
|
unleash/plugins/tox_tests.py
|
unleash/plugins/tox_tests.py
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(['tox'])
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(ve.get_binary('tox'))
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
Use get_binary in tox tests.
|
Use get_binary in tox tests.
|
Python
|
mit
|
mbr/unleash
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(['tox'])
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
Use get_binary in tox tests.
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(ve.get_binary('tox'))
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
<commit_before>from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(['tox'])
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
<commit_msg>Use get_binary in tox tests.<commit_after>
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(ve.get_binary('tox'))
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(['tox'])
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
Use get_binary in tox tests.
from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(ve.get_binary('tox'))
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
<commit_before>from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(['tox'])
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
<commit_msg>Use get_binary in tox tests.<commit_after>from click import Option
import subprocess
from unleash.util import VirtualEnv
from .utils_tree import in_tmpexport
PLUGIN_NAME = 'tox_tests'
def setup(cli):
cli.commands['release'].params.append(Option(
['--tests/--no-tests', '-t/-T'], default=True,
help='Run unittests (default: enabled).'
))
def collect_info(ctx):
info = ctx['info']
info['tox_tests'] = ctx['opts']['tests']
if not ctx['commit'].path_exists('tox.ini'):
ctx['issues'].warn(
'No tox.ini found.',
'There was no tox.ini found in the project root. No tests will be '
'run on the release.')
info['tox_tests'] = False
def lint_release(ctx):
if not ctx['info']['tox_tests']:
return
ctx['log'].info('Running tox tests')
try:
ctx['log'].debug('Installing tox in a new virtualenv')
with VirtualEnv.temporary() as ve, in_tmpexport(ctx['commit']):
ve.pip_install('tox')
ctx['log'].debug('Running tests using tox')
ve.check_output(ve.get_binary('tox'))
except subprocess.CalledProcessError as e:
ctx['issues'].error('tox testing failed:\n{}'.format(e.output))
|
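The switch from ['tox'] to ve.get_binary('tox') makes check_output run the tox that was just pip-installed into the temporary virtualenv rather than whatever tox is first on the caller's PATH. The real resolver lives in unleash.util and is not shown here; as a sketch of what such a lookup typically does (the bin/Scripts split follows standard virtualenv layout, and the function body is an assumption):

import os
import sys

def get_binary(venv_root, name):
    # Virtualenv console scripts land in bin/ on POSIX and Scripts\ on Windows.
    subdir = 'Scripts' if sys.platform == 'win32' else 'bin'
    return os.path.join(venv_root, subdir, name)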
de8451c1c9d4122cfae6edeae4e17e11c21b4580
|
Core/src/org/sleuthkit/autopsy/examples/reportmodule.py
|
Core/src/org/sleuthkit/autopsy/examples/reportmodule.py
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getRelativeFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
Update sample Python report module for ReportModule interface API change
|
Update sample Python report module for ReportModule interface API change
|
Python
|
apache-2.0
|
millmanorama/autopsy,APriestman/autopsy,karlmortensen/autopsy,rcordovano/autopsy,eXcomm/autopsy,millmanorama/autopsy,sidheshenator/autopsy,esaunders/autopsy,APriestman/autopsy,mhmdfy/autopsy,dgrove727/autopsy,esaunders/autopsy,karlmortensen/autopsy,sidheshenator/autopsy,eXcomm/autopsy,esaunders/autopsy,dgrove727/autopsy,rcordovano/autopsy,dgrove727/autopsy,narfindustries/autopsy,karlmortensen/autopsy,sidheshenator/autopsy,mhmdfy/autopsy,eXcomm/autopsy,APriestman/autopsy,maxrp/autopsy,rcordovano/autopsy,mhmdfy/autopsy,wschaeferB/autopsy,eXcomm/autopsy,sidheshenator/autopsy,mhmdfy/autopsy,rcordovano/autopsy,esaunders/autopsy,rcordovano/autopsy,narfindustries/autopsy,wschaeferB/autopsy,karlmortensen/autopsy,maxrp/autopsy,maxrp/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,millmanorama/autopsy,maxrp/autopsy,narfindustries/autopsy,wschaeferB/autopsy,APriestman/autopsy,APriestman/autopsy,esaunders/autopsy,millmanorama/autopsy,APriestman/autopsy,rcordovano/autopsy,APriestman/autopsy
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
Update sample Python report module for ReportModule interface API change
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getRelativeFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
<commit_before>from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
<commit_msg>Update sample Python report module for ReportModule interface API change<commit_after>
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getRelativeFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
Update sample Python report module for ReportModule interface API change
from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getRelativeFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
<commit_before>from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
<commit_msg>Update sample Python report module for ReportModule interface API change<commit_after>from java.lang import System
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.autopsy.report import GeneralReportModuleAdapter
class SampleGeneralReportModule(GeneralReportModuleAdapter):
def getName(self):
return "Sample Report Module"
def getDescription(self):
return "A sample Jython report module"
def getRelativeFilePath(self):
return "sampleReport.txt"
def generateReport(self, reportPath, progressBar):
# There are two tasks to do.
progressBar.setIndeterminate(False)
progressBar.start()
progressBar.setMaximumProgress(2)
# Get files by created in last two weeks.
fileCount = 0
autopsyCase = Case.getCurrentCase()
sleuthkitCase = autopsyCase.getSleuthkitCase()
currentTime = System.currentTimeMillis() / 1000
minTime = currentTime - (14 * 24 * 60 * 60)
otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
for otherFile in otherFiles:
fileCount += 1
progressBar.increment()
# Write the result to the report file.
report = open(reportPath + '\\' + self.getFilePath(), 'w')
report.write("file count = %d" % fileCount)
report.close()
progressBar.increment()
progressBar.complete()
|
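One inconsistency the rename leaves behind: generateReport still opens the report via self.getFilePath(), so unless the Java-side GeneralReportModuleAdapter continues to expose the old method name, the matching call after this change would be:

# Hypothetical corrected line; assumes the adapter no longer provides getFilePath().
report = open(reportPath + '\\' + self.getRelativeFilePath(), 'w')

The hard-coded '\\' separator also assumes Windows paths; os.path.join(reportPath, self.getRelativeFilePath()) would be the portable spelling if the surrounding API accepts it.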
69f1013a11e540a93b1afff9da819d5f8028078a
|
utils/lit/tests/xunit-output.py
|
utils/lit/tests/xunit-output.py
|
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
# REQUIRES: shell
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
|
Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@332563 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm
|
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@332563 91177308-0d34-0410-b5e6-96231b3b80d8
|
# REQUIRES: shell
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
<commit_before># Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
<commit_msg>Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@332563 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
# REQUIRES: shell
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@332563 91177308-0d34-0410-b5e6-96231b3b80d8
# REQUIRES: shell
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
<commit_before># Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
<commit_msg>Mark test with "REQUIRES: shell" since it directly invokes "sh" and was failing on Windows.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@332563 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after># REQUIRES: shell
# Check xunit output
# RUN: rm -rf %t.xunit.xml
# RUN: not %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output
# If xmllint is installed verify that the generated xml is well-formed
# RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
# RUN: FileCheck < %t.xunit.xml %s
# CHECK: <?xml version="1.0" encoding="UTF-8" ?>
# CHECK: <testsuites>
# CHECK: <testsuite name='test-data' tests='1' failures='1' skipped='0'>
# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
# CHECK-NEXT: <failure ><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
# CHECK: </testsuite>
# CHECK: </testsuites>
|
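The optional xmllint step in the record above checks well-formedness only when that tool happens to be installed; the same guard can be approximated in pure Python with the standard library. A minimal sketch, assuming an output file path that is purely illustrative:

import xml.etree.ElementTree as ET

def is_well_formed(path):
    # Parse the file; a ParseError means the XML is malformed
    try:
        ET.parse(path)
        return True
    except ET.ParseError:
        return False

print(is_well_formed("output.xunit.xml"))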
92b13c26c216a6ced37017041242ad410890c406
|
stats-to-datadog.py
|
stats-to-datadog.py
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
print "Got {} for {}".format(amount, topology)
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
Print something from stats script so you can see it works!
|
Print something from stats script so you can see it works!
|
Python
|
mit
|
evertrue/capillary,evertrue/capillary,keenlabs/capillary,evertrue/capillary,evertrue/capillary,keenlabs/capillary,keenlabs/capillary
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
Print something from stats script so you can see it works!
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
print "Got {} for {}".format(amount, topology)
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
<commit_before>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
<commit_msg>Print something from stats script so you can see it works!<commit_after>
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
print "Got {} for {}".format(amount, topology)
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
Print something from stats script so you can see it works!import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
print "Got {} for {}".format(amount, topology)
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
<commit_before>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
<commit_msg>Print something from stats script so you can see it works!<commit_after>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
print "Got {} for {}".format(amount, topology)
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
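The commit above adds a bare print for quick feedback while the script runs. A hedged alternative sketch using the standard logging module (the logger name and helper below are illustrative, not part of the original script) keeps the same visibility while remaining filterable by level:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("kafkamon")

def report_total(amount, topology):
    # Equivalent to the added print, but routed through logging
    log.info("Got %s for %s", amount, topology)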
5dd758cd0b9b917968b16948db0f635db8571d92
|
jsonfield/utils.py
|
jsonfield/utils.py
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
Revert changes: freezegun has been updated.
|
Revert changes: freezegun has been updated.
|
Python
|
bsd-3-clause
|
chrismeyersfsu/django-jsonfield
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
Revert changes: freezegun has been updated.
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
<commit_before>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
<commit_msg>Revert changes: freezegun has been updated.<commit_after>
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
Revert changes: freezegun has been updated.import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
<commit_before>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
<commit_msg>Revert changes: freezegun has been updated.<commit_after>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
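For context, TZAwareJSONEncoder subclasses DjangoJSONEncoder, which in turn subclasses the standard json.JSONEncoder, so it plugs straight into json.dumps. A minimal usage sketch, assuming a Django environment where the import above succeeds:

import datetime
import json

payload = {"created": datetime.datetime(2014, 1, 1, 12, 0)}
# Naive datetimes format without an offset; %z renders empty when tzinfo is None
print(json.dumps(payload, cls=TZAwareJSONEncoder))
# -> {"created": "2014-01-01 12:00:00"}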
b6fc94d9c6b5015ad2dc882d454127d4b0a6ecee
|
django_foodbot/api/models.py
|
django_foodbot/api/models.py
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=60, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s' % (self.day, self.week)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=120, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s %s' % (self.day, self.week, self.meal)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
Increase character length for food
|
Increase character length for food
|
Python
|
mit
|
andela-kanyanwu/food-bot-review
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=60, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s' % (self.day, self.week)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
Increase character length for food
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=120, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s %s' % (self.day, self.week, self.meal)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
<commit_before>from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=60, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s' % (self.day, self.week)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
<commit_msg>Increase character length for food<commit_after>
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=120, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s %s' % (self.day, self.week, self.meal)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=60, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s' % (self.day, self.week)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
Increase character length for foodfrom django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=120, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s %s' % (self.day, self.week, self.meal)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
<commit_before>from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=60, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s' % (self.day, self.week)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
<commit_msg>Increase character length for food<commit_after>from django.db import models
class Menu(models.Model):
day = models.CharField(max_length=10, blank=False, null=False)
food = models.CharField(max_length=120, blank=False, null=False)
meal = models.CharField(max_length=10, blank=False, null=False)
option = models.IntegerField(null=False)
week = models.IntegerField(null=False)
class Meta:
ordering = ('-week',)
db_table = 'menu_table'
def __unicode__(self):
return u'%s %s %s' % (self.day, self.week, self.meal)
class Rating(models.Model):
date = models.DateTimeField(auto_now_add=True)
user_id = models.CharField(max_length=20)
menu = models.ForeignKey(Menu, related_name='rating')
rate = models.IntegerField(blank=False, null=False)
comment = models.TextField(default='no comment', )
class Meta:
ordering = ('-date',)
db_table = 'rating'
def __unicode__(self):
return u'%s' % (self.date)
|
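Because max_length changed on an existing column, Django's model-level validation is the quickest way to confirm the new limit (a schema migration would still be needed for the database itself). A short sketch, assuming a configured project with this app installed; the field values are illustrative:

from django.core.exceptions import ValidationError

menu = Menu(day="Monday", food="x" * 100, meal="lunch", option=1, week=2)
try:
    menu.full_clean()  # passes at max_length=120; would raise at the old 60
except ValidationError as err:
    print(err.message_dict)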
7ba020caae9e247335620b86f8e7f51d67787b83
|
test/helpers/action_creators.py
|
test/helpers/action_creators.py
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
add_todo(text)
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
dispatch(add_todo(text))
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
Fix add_todo_if_empty test helper action creator
|
Fix add_todo_if_empty test helper action creator
|
Python
|
mit
|
usrlocalben/pydux
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
add_todo(text)
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
Fix add_todo_if_empty test helper action creator
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
dispatch(add_todo(text))
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
<commit_before>from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
add_todo(text)
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
<commit_msg>Fix add_todo_if_empty test helper action creator<commit_after>
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
dispatch(add_todo(text))
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
add_todo(text)
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
Fix add_todo_if_empty test helper action creatorfrom __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
dispatch(add_todo(text))
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
<commit_before>from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
add_todo(text)
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
<commit_msg>Fix add_todo_if_empty test helper action creator<commit_after>from __future__ import absolute_import
from .action_types import (
ADD_TODO, DISPATCH_IN_MIDDLE, THROW_ERROR, UNKNOWN_ACTION,
)
def add_todo(text):
return {'type': ADD_TODO, 'text': text}
def add_todo_if_empty(text):
def anon(dispatch, get_state):
if len(get_state()) == 0:
dispatch(add_todo(text))
return anon
def dispatch_in_middle(bound_dispatch_fn):
return {
'type': DISPATCH_IN_MIDDLE,
'bound_dispatch_fn': bound_dispatch_fn,
}
def throw_error():
return {
'type': THROW_ERROR,
}
def unknown_action():
return {
'type': UNKNOWN_ACTION,
}
|
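The fix matters because the inner function is a thunk: merely building the action dict is a no-op until it is dispatched. A minimal driver sketch showing the corrected behavior (the list-based dispatch below is an illustration, not pydux API):

dispatched = []
thunk = add_todo_if_empty("First")
thunk(dispatched.append, lambda: [])  # state is empty, so the action is dispatched
print(dispatched)  # [{'type': ADD_TODO, 'text': 'First'}], with ADD_TODO's actual value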
b7e1b52e3482de19430d4b04faa90967bd623199
|
Generator.py
|
Generator.py
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
print(rule)
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
Fix crash if form-specific rules were not specified
|
Fix crash if form-specific rules were not specified
|
Python
|
mit
|
kdelwat/Lexeme
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
print(rule)
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
Fix crash if form-specific rules were not specified
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
<commit_before>import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
print(rule)
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
<commit_msg>Fix crash if form-specific rules were not specified<commit_after>
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
print(rule)
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
Fix crash if form-specific rules were not specifiedimport random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
<commit_before>import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
print(rule)
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
<commit_msg>Fix crash if form-specific rules were not specified<commit_after>import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
print(categories)
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
|
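The nested conditional introduced by the fix can be collapsed without changing behavior. A behavior-equivalent sketch (an illustration, not the project's code):

def pick_rule(form, formrules, defaultrule):
    # Fall back to the default when formrules is None or lacks this form
    if formrules is not None:
        return formrules.get(form, defaultrule)
    return defaultrule

With a helper like this, the rule selection in generateWord would reduce to a single call: rule = pick_rule(form, formrules, defaultrule).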
9e866ec0488135026dbe9e1d102c9680f892019d
|
librator/packing.py
|
librator/packing.py
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(card.code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
Fix missing card instance before extracting code for filenames
|
Fix missing card instance before extracting code for filenames
|
Python
|
mit
|
Nekroze/librator,Nekroze/librator
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
Fix missing card instance before extracting code for filenames
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(card.code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
<commit_before>"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
<commit_msg>Fix missing card instance before extracting code for filenames<commit_after>
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(card.code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
Fix missing card instance before extracting code for filenames"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(card.code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
<commit_before>"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
<commit_msg>Fix missing card instance before extracting code for filenames<commit_after>"""Implementation of packing and unpacking functions."""
import yaml
from glob import glob
from os.path import join as pjoin
import os
from librarian.card import Card
from librarian.library import Library
def pack(library, carddir):
"""Pack all ``.crd`` card files in the carddir into the given library."""
if os.path.exists(library):
os.remove(library)
lib = Library(library)
lib.create_db()
for cardpath in glob(pjoin(carddir, "*.crd")):
# Open card file and load it with yaml
with open(cardpath) as cardfile:
carddict = yaml.safe_load(cardfile)
# Load the card dict from file into a card object
card = Card().load(carddict)
# Save the card object to the library
lib.save_card(card)
def unpack(library, carddir):
"""Unpack all cards from the given library into carddir as .crd files."""
if not os.path.exists(carddir) or not os.path.isdir(carddir):
os.makedirs(carddir)
lib = Library(library)
cardpath = pjoin(carddir, "{0}.crd")
for card in lib.retrieve_all():
with open(cardpath.format(card.code), 'w') as cardfile:
yaml.dump(card.save(), cardfile, default_flow_style=False)
|
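A round-trip usage sketch of the two functions above (the paths are hypothetical, and the librarian package must be importable):

# Pack a directory of .crd files into a fresh library, then export it again
pack("cards.lbr", "cards/")
unpack("cards.lbr", "exported/")
# exported/ now holds one <code>.crd file per card, named via card.code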
96962b19518186b55c41a19d1cfdaae23eb899e3
|
eduid_signup_amp/__init__.py
|
eduid_signup_amp/__init__.py
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist(user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
Change to sync with latest changes in eduid_am related to Exceptions
|
Change to sync with latest changes in eduid_am related to Exceptions
|
Python
|
bsd-3-clause
|
SUNET/eduid-signup-amp
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist(user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
Change to sync with latest changes in eduid_am related to Exceptions
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
<commit_before>from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist(user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
<commit_msg>Change to sync with latest changes in eduid_am related to Exceptions<commit_after>
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist(user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
Change to sync with latest changes in eduid_am related to Exceptionsfrom eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
<commit_before>from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist(user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
<commit_msg>Change to sync with latest changes in eduid_am related to Exceptions<commit_after>from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.registered.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
|
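The whole change in this record is to raise the exception with a formatted, human-readable message instead of the bare `user_id`. A tiny sketch of why that helps (hypothetical exception class; eduid_am's real one may differ):

```
class UserDoesNotExist(Exception):
    """Raised when a user lookup finds no matching document."""

try:
    raise UserDoesNotExist("No user matching _id='%s'" % 'abc123')
except UserDoesNotExist as exc:
    # The failing key is embedded in str(exc), so a bare log line or
    # traceback is immediately actionable.
    assert 'abc123' in str(exc)
```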
ea2247fe90836e92067ce27e5b22cf8e7dc7bc1b
|
saleor/app/tasks.py
|
saleor/app/tasks.py
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
        app_installation.message = "Unknown error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
Add more context to install app msg
|
Add more context to install app msg
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
        app_installation.message = "Unknown error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
Add more context to install app msg
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
<commit_before>import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
        app_installation.message = "Unknown error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
<commit_msg>Add more context to install app msg<commit_after>
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
        app_installation.message = "Unknown error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
Add more context to install app msgimport logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
<commit_before>import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
        app_installation.message = "Unknown error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
<commit_msg>Add more context to install app msg<commit_after>import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
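The final `except Exception` branch now logs the exception and folds its text into the stored message rather than returning a fixed string. A stripped-down sketch of that catch-all pattern (names here are illustrative, not Saleor's API):

```
import logging

logger = logging.getLogger(__name__)

def run_job(job):
    try:
        job()
        return "ok"
    except Exception as e:
        # Log for operators, and surface the error text on the record
        # so the end user sees more than a generic "unknown error".
        logger.warning("Job failed. error %s", e)
        return "Error %s. Contact support." % e

assert run_job(lambda: 1 / 0) == "Error division by zero. Contact support."
```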
b47bdb4eca8c357a8c33c5b95ab80748f1358e00
|
lintreview/utils.py
|
lintreview/utils.py
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
except OSError:
return False
return name in installed
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
return False
return name in installed
|
Handle case where bundler exists, but there is no Gemfile
|
Handle case where bundler exists, but there is no Gemfile
```
adrian@kamek:~$ bundle list
Could not locate Gemfile
adrian@kamek:~$ echo $?
10
```
|
Python
|
mit
|
zoidbergwill/lint-review,markstory/lint-review,markstory/lint-review,adrianmoisey/lint-review,markstory/lint-review,zoidbergwill/lint-review,zoidbergwill/lint-review,adrianmoisey/lint-review
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
except OSError:
return False
return name in installed
Handle case where bundler exists, but there is no Gemfile
```
adrian@kamek:~$ bundle list
Could not locate Gemfile
adrian@kamek:~$ echo $?
10
```
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
return False
return name in installed
|
<commit_before>import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
except OSError:
return False
return name in installed
<commit_msg>Handle case where bundler exists, but there is no Gemfile
```
adrian@kamek:~$ bundle list
Could not locate Gemfile
adrian@kamek:~$ echo $?
10
```<commit_after>
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
return False
return name in installed
|
import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
except OSError:
return False
return name in installed
Handle case where bundler exists, but there is no Gemfile
```
adrian@kamek:~$ bundle list
Could not locate Gemfile
adrian@kamek:~$ echo $?
10
```import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
return False
return name in installed
|
<commit_before>import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
except OSError:
return False
return name in installed
<commit_msg>Handle case where bundler exists, but there is no Gemfile
```
adrian@kamek:~$ bundle list
Could not locate Gemfile
adrian@kamek:~$ echo $?
10
```<commit_after>import os
import subprocess
def in_path(name):
"""
Check whether or not a command line tool
exists in the system path.
@return boolean
"""
for dirname in os.environ['PATH'].split(os.pathsep):
if os.path.exists(os.path.join(dirname, name)):
return True
return False
def npm_exists(name):
"""
Check whether or not a cli tool exists in a node_modules/.bin
dir in os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'node_modules', '.bin', name)
return os.path.exists(path)
def composer_exists(name):
"""
Check whether or not a cli tool exists in vendor/bin/{name}
relative to os.cwd
@return boolean
"""
cwd = os.getcwd()
path = os.path.join(cwd, 'vendor', 'bin', name)
return os.path.exists(path)
def bundle_exists(name):
"""
Check whether or not a ruby tool exists in
the os.cwd using bundler.
This assumes that you installed bundler packages
into ./bundle as documented in the README.
@return boolean
"""
try:
installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
return False
return name in installed
|
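The exception clause in this record is a classic pitfall: writing `except subprocess.CalledProcessError or OSError:` evaluates the `or` first, yielding a single class, so `OSError` (raised when the `bundle` binary is missing) would slip through uncaught. Only the tuple form catches both; a quick standalone check:

```
import subprocess

# `A or B` between classes is just `A`, so the or-form matches one class:
assert (subprocess.CalledProcessError or OSError) is subprocess.CalledProcessError

def bundle_installed(name):
    try:
        installed = subprocess.check_output(['bundle', 'list'])
    except (subprocess.CalledProcessError, OSError):
        # The tuple catches both a missing binary (OSError) and a
        # non-zero exit such as "Could not locate Gemfile" (exit status 10).
        return False
    return name.encode() in installed

print(bundle_installed('rubocop'))
```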
f7f20c50b82e3b8f8f2be4687e661348979fe6a6
|
script_helpers.py
|
script_helpers.py
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs='+',
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser, nargs_in='+'):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs=nargs_in,
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
Allow for directories argument to be optional
|
Allow for directories argument to be optional
|
Python
|
bsd-3-clause
|
mwcraig/msumastro
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs='+',
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
Allow for directories argument to be optional
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser, nargs_in='+'):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs=nargs_in,
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
<commit_before>"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs='+',
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
<commit_msg>Allow for directories argument to be optional<commit_after>
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser, nargs_in='+'):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs=nargs_in,
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs='+',
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
Allow for directories argument to be optional"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser, nargs_in='+'):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs=nargs_in,
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
<commit_before>"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs='+',
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
<commit_msg>Allow for directories argument to be optional<commit_after>"""A set of functions to standardize some options for python scripts."""
def setup_parser_help(parser, additional_docs=None):
"""
Set formatting for parser to raw and add docstring to help output
Parameters
----------
parser : `ArgumentParser`
The parser to be modified.
additional_docs: str
Any documentation to be added to the documentation produced by
`argparse`
"""
from argparse import RawDescriptionHelpFormatter
parser.formatter_class = RawDescriptionHelpFormatter
if additional_docs is not None:
parser.epilog = additional_docs
def add_verbose(parser):
"""
Add a verbose option (--verbose or -v) to parser.
Parameters:
-----------
parser : `ArgumentParser`
"""
verbose_help = "provide more information during processing"
parser.add_argument("-v", "--verbose", help=verbose_help,
action="store_true")
def add_directories(parser, nargs_in='+'):
"""
Add a positional argument that is one or more directories.
Parameters
----------
parser : `ArgumentParser`
"""
parser.add_argument("dir", metavar='dir', nargs=nargs_in,
help="Directory to process")
def construct_default_parser(docstring=None):
#import script_helpers
import argparse
parser = argparse.ArgumentParser()
if docstring is not None:
setup_parser_help(parser, docstring)
add_verbose(parser)
add_directories(parser)
return parser
|
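The point of threading `nargs_in` through `add_directories` is that callers can now relax the positional argument: `'+'` demands at least one directory, while `'*'` makes it optional. A small demonstration:

```
import argparse

def build(nargs_in='+'):
    parser = argparse.ArgumentParser()
    parser.add_argument("dir", metavar='dir', nargs=nargs_in,
                        help="Directory to process")
    return parser

# '*' accepts an empty command line; '+' would exit with a usage error.
assert build('*').parse_args([]).dir == []
assert build('+').parse_args(['a', 'b']).dir == ['a', 'b']
```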
31f81fd98a678949b1bb7d14863d497ab40d5afc
|
locksmith/common.py
|
locksmith/common.py
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,unicode(v).encode('utf-8'))
for k,v in params.iteritems()
if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
Convert url param values to unicode before encoding.
|
Convert url param values to unicode before encoding.
|
Python
|
bsd-3-clause
|
sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
Convert url param values to unicode before encoding.
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,unicode(v).encode('utf-8'))
for k,v in params.iteritems()
if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
<commit_before>import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
<commit_msg>Convert url param values to unicode before encoding.<commit_after>
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,unicode(v).encode('utf-8'))
for k,v in params.iteritems()
if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
Convert url param values to unicode before encoding.import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,unicode(v).encode('utf-8'))
for k,v in params.iteritems()
if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
<commit_before>import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
<commit_msg>Convert url param values to unicode before encoding.<commit_after>import hashlib
import hmac
import urllib, urllib2
API_OPERATING_STATUSES = (
(1, 'Normal'),
(2, 'Degraded Service'),
(3, 'Service Disruption'),
(4, 'Undergoing Maintenance')
)
API_STATUSES = (
(1, 'Active'),
(2, 'Deprecated'),
(3, 'Disabled')
)
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,unicode(v).encode('utf-8'))
for k,v in params.iteritems()
if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
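The one-line fix is to coerce each value with `unicode(...)` before `.encode('utf-8')`, so non-string params (ints, dates) no longer break signature generation. The record is Python 2; a Python 3 rendering of the same idea for reference, with `str` playing the role of `unicode`:

```
import hashlib
import hmac
from urllib.parse import urlencode

def get_signature(params, signkey):
    # Coerce every value to text first so ints and dates survive encoding.
    data = sorted((k, str(v).encode('utf-8'))
                  for k, v in params.items() if k != 'signature')
    qs = urlencode(data)
    return hmac.new(signkey.encode('utf-8'), qs.encode('utf-8'),
                    hashlib.sha1).hexdigest()

assert len(get_signature({'key': 'abc', 'count': 3}, 'secret')) == 40
```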
e5a634100feb5ee486c1de0cdb21325de6477538
|
services/vimeo.py
|
services/vimeo.py
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
Rewrite Vimeo to use the new scope selection system
|
Rewrite Vimeo to use the new scope selection system
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
Rewrite Vimeo to use the new scope selection system
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
<commit_before>import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
<commit_msg>Rewrite Vimeo to use the new scope selection system<commit_after>
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
Rewrite Vimeo to use the new scope selection systemimport foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
<commit_before>import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
<commit_msg>Rewrite Vimeo to use the new scope selection system<commit_after>import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
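The rewrite replaces the hard-coded `?permission=delete` query string with a `get_authorize_params` override that injects the user's single (radio-button) scope choice, where `None` means the default read permission. A toy version of the hook (illustrative only, not foauth's real base class):

```
class OAuth1(object):
    def get_authorize_params(self, redirect_uri, scopes):
        return {'oauth_callback': redirect_uri}

class Vimeo(OAuth1):
    def get_authorize_params(self, redirect_uri, scopes):
        params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
        # A scope of None means default read access, so only send the
        # 'permission' parameter when a stronger level was selected.
        if any(scopes):
            params['permission'] = scopes[0]
        return params

assert 'permission' not in Vimeo().get_authorize_params('cb', [None])
assert Vimeo().get_authorize_params('cb', ['delete'])['permission'] == 'delete'
```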
973ff308f16fe033b5da60a28cb0d6448062a8f9
|
examples/basic_datalogger.py
|
examples/basic_datalogger.py
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.69.122')#.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
Make sure the schema gets pulled in as a module
|
HG-1494: Make sure the schema gets pulled in as a module
|
Python
|
mit
|
liquidinstruments/pymoku
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
HG-1494: Make sure the schema gets pulled in as a module
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.69.122')#.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
<commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
<commit_msg>HG-1494: Make sure the schema gets pulled in as a module<commit_after>
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.69.122')#.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
HG-1494: Make sure the schema gets pulled in as a module
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.69.122')#.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
<commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
<commit_msg>HG-1494: Make sure the schema gets pulled in as a module<commit_after>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.69.122')#.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme))
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print("Error occured: %s" % e)
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print(e)
finally:
m.close()
|
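Editor's note: the datalogger example above is a poll-until-done loop. The sketch below exercises the same control flow against a hypothetical stand-in for the instrument, so it runs without Moku hardware; all `FakeDatalogger` names are invented for illustration and are not part of pymoku:

class FakeDatalogger(object):
    """Hypothetical stand-in for the Oscilloscope datalogger API used above."""
    def __init__(self, duration=10):
        self.elapsed, self.duration = 0, duration
    def datalogger_remaining(self):
        return self.elapsed, max(self.duration - self.elapsed, 0)
    def datalogger_samples(self):
        return self.elapsed * 100  # pretend 100 samples per second
    def datalogger_completed(self):
        return self.elapsed >= self.duration
    def datalogger_error(self):
        return None
    def tick(self):
        self.elapsed += 1  # stands in for one second of real acquisition

i = FakeDatalogger()
while True:
    i.tick()
    trems, treme = i.datalogger_remaining()
    print("Captured (%d samples); %d seconds from start, %d from end"
          % (i.datalogger_samples(), trems, treme))
    if i.datalogger_completed():
        break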
ca3978b6068add93418b4c5db8346143533beb7e
|
examples/forwarder_device.py
|
examples/forwarder_device.py
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
Print ports when forwarder device starts.
|
MNT: Print ports when forwarder device starts.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
MNT: Print ports when forwarder device starts.
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
<commit_before>import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
<commit_msg>MNT: Print ports when forwarder device starts.<commit_after>
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
MNT: Print ports when forwarder device starts.
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
<commit_before>import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
<commit_msg>MNT: Print ports when forwarder device starts.<commit_after>import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
|
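Editor's note: the device above is a plain SUB-to-PUB relay, and the patch only adds a line reporting the bound ports. A hedged sketch of a publisher/subscriber pair talking through it, assuming pyzmq is installed and the forwarder is running locally on illustrative ports 5559/5560 (not values from a real connection.yml):

import time
import zmq

context = zmq.Context()
pub = context.socket(zmq.PUB)
pub.connect("tcp://localhost:5559")   # the device's frontend (its SUB socket)
sub = context.socket(zmq.SUB)
sub.connect("tcp://localhost:5560")   # the device's backend (its PUB socket)
sub.setsockopt_string(zmq.SUBSCRIBE, "")
time.sleep(0.5)  # PUB/SUB joins are asynchronous; give them time to settle
pub.send_string("start {'uid': 'abc123'}")  # illustrative document payload
print(sub.recv_string())                    # arrives via the forwarder

The short sleep matters: messages published before the subscription propagates are silently dropped, so a real client would either sleep briefly or use a synchronization handshake.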
2c1ffd6abed12de8878ec60021ae16dc9c011975
|
auth0/v2/authentication/link.py
|
auth0/v2/authentication/link.py
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
"""Link accounts endpoints.
Args:
domain (str): Your auth0 domain (e.g.: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
"""Unlink an account.
"""
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
Add docstrings in Link class
|
Add docstrings in Link class
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
Add docstrings in Link class
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
"""Link accounts endpoints.
Args:
domain (str): Your auth0 domain (e.g.: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
"""Unlink an account.
"""
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
<commit_before>from .base import AuthenticationBase
class Link(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
<commit_msg>Add docstrings in Link class<commit_after>
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
"""Link accounts endpoints.
Args:
domain (str): Your auth0 domain (e.g.: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
"""Unlink an account.
"""
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
from .base import AuthenticationBase
class Link(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
Add docstrings in Link class
from .base import AuthenticationBase
class Link(AuthenticationBase):
"""Link accounts endpoints.
Args:
domain (str): Your auth0 domain (e.g.: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
"""Unlink an account.
"""
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
<commit_before>from .base import AuthenticationBase
class Link(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
<commit_msg>Add docstrings in Link class<commit_after>from .base import AuthenticationBase
class Link(AuthenticationBase):
"""Link accounts endpoints.
Args:
domain (str): Your auth0 domain (e.g.: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def unlink(self, access_token, user_id):
"""Unlink an account.
"""
return self.post(
url='https://%s/unlink' % self.domain,
data={
'access_token': access_token,
'user_id': user_id,
},
headers={'Content-Type': 'application/json'}
)
|
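Editor's note: the change above is documentation-only; the endpoint behavior is unchanged. A usage sketch follows. The import path mirrors the file location shown in this record, but the domain, token, and user id are placeholders, and the call issues a real POST to https://<domain>/unlink:

from auth0.v2.authentication.link import Link

link = Link('username.auth0.com')              # placeholder tenant domain
response = link.unlink(
    access_token='A9CvPwFojaBI...',            # placeholder primary-account token
    user_id='auth0|507f1f77bcf86cd799439020',  # placeholder secondary account id
)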
44d03e7688cb3b0c14b203fbbef859ad5effc46e
|
run_faults.py
|
run_faults.py
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.clawutil.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
Change to alternative batch class
|
Change to alternative batch class
|
Python
|
mit
|
mandli/compsyn-geoclaw
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.clawutil.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
Change to alternative batch class
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
<commit_before>#!/usr/bin/env python
import os
import sys
import glob
import clawpack.clawutil.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()<commit_msg>Change to alternative batch class<commit_after>
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
#!/usr/bin/env python
import os
import sys
import glob
import clawpack.clawutil.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()Change to alternative batch class#!/usr/bin/env python
import os
import sys
import glob
import clawpack.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
<commit_before>#!/usr/bin/env python
import os
import sys
import glob
import clawpack.clawutil.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()<commit_msg>Change to alternative batch class<commit_after>#!/usr/bin/env python
import os
import sys
import glob
import clawpack.tests as clawtests
class FaultTest(clawtests.Test):
def __init__(self, deformation_file):
super(FaultTest, self).__init__()
self.type = "compsys"
self.name = "guerrero_gap"
self.prefix = os.path.basename(deformation_file).split('.')[0]
self.deformation_file = os.path.abspath(deformation_file)
self.executable = 'xgeoclaw'
# Data objects
import setrun
self.rundata = setrun.setrun()
# Add deformation file
self.rundata.dtopo_data.dtopofiles = []
self.rundata.dtopo_data.dtopofiles.append([1,5,5,self.deformation_file])
def __str__(self):
output = super(FaultTest, self).__str__()
output += "\n Deformation File: %s" % self.deformation_file
return output
if __name__ == '__main__':
if len(sys.argv) > 1:
deformation_files = sys.argv[1:]
else:
deformation_files = glob.glob('./bathy/rot_gap*.xyzt')
tests = []
for deformation_file in deformation_files:
tests.append(FaultTest(deformation_file))
controller = clawtests.TestController(tests)
print controller
controller.tar = True
controller.run()
|
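Editor's note: the diff above swaps only the import (`clawpack.clawutil.tests` to `clawpack.tests`); the test class itself is untouched. The sketch below reproduces the subclass-and-format pattern the script depends on, with a hypothetical `Test` stub standing in for the real batch base class, which is not shown in this record:

import os

class Test(object):
    """Hypothetical stand-in for the batch Test base class."""
    def __init__(self):
        self.type = self.name = self.prefix = None
    def __str__(self):
        return "Test %s/%s/%s" % (self.type, self.name, self.prefix)

class FaultTestSketch(Test):
    def __init__(self, deformation_file):
        super(FaultTestSketch, self).__init__()
        self.type, self.name = "compsys", "guerrero_gap"
        self.prefix = os.path.basename(deformation_file).split('.')[0]
        self.deformation_file = os.path.abspath(deformation_file)
    def __str__(self):
        return (super(FaultTestSketch, self).__str__()
                + "\n  Deformation File: %s" % self.deformation_file)

print(FaultTestSketch('./bathy/rot_gap_01.xyzt'))  # illustrative file name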
d098ca43600f98f3e6c4c89601099964d27c9b22
|
djoauth2/decorators.py
|
djoauth2/decorators.py
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scopes=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scope_names=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
Fix name mismatch / typo.
|
Fix name mismatch / typo.
|
Python
|
mit
|
Locu/djoauth2,seler/djoauth2,vden/djoauth2-ng,Locu/djoauth2,vden/djoauth2-ng,seler/djoauth2
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scopes=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
Fix name mismatch / typo.
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scope_names=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
<commit_before># coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scopes=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
<commit_msg>Fix name mismatch / typo.<commit_after>
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scope_names=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scopes=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
Fix name mismatch / typo.
# coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scope_names=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
<commit_before># coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scopes=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
<commit_msg>Fix name mismatch / typo.<commit_after># coding: utf-8
from django.utils.functional import wraps
from djoauth2.access_token_authenticator import AccessTokenAuthenticator
def oauth_scope(*scope_names):
""" Only allow requests with sufficient OAuth scope access.
Returns a decorator that restricts requests to those that authenticate
successfully and have access to the given scope names.
For example, to restrict access to a given endpoint:
>>> @oauth_scope('foo', 'bar')
>>> def secret_attribute_endpoint(access_token, request, *args, **kwargs):
>>> # Because of the decorator, the function is guaranteed to only be run
>>> # if the request includes proper access to the 'foo' and 'bar'
>>> # scopes.
>>> user = access_token.user
>>> return HttpResponse(json.dumps({
>>> 'super_secret_attribute' : user.super_secret_attribute
>>> }))
The first argument to the wrapped endpoint will now be an AccessToken
object. The second argument will be the original HttpRequest, and all
other parameters will follow.
"""
authenticator = AccessTokenAuthenticator(required_scope_names=scope_names)
def scope_decorator(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
access_token, error_response_arguments = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_response_arguments)
return view_func(access_token, request, *args, **kwargs)
return wrapper
return scope_decorator
|
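Editor's note: the fix above only renames the keyword argument to `required_scope_names`, matching the `AccessTokenAuthenticator` signature. The runnable sketch below keeps the decorator logic but substitutes a hypothetical stub authenticator, since djoauth2's real class is not reproduced in this record:

from functools import wraps

class StubAuthenticator(object):
    """Hypothetical authenticator: accepts any request dict carrying a token."""
    def __init__(self, required_scope_names):
        self.required_scope_names = required_scope_names
    def validate(self, request):
        token = request.get('token')
        return (token, None) if token else (None, ('invalid_request', 400))
    def make_error_response(self, error, status):
        return {'error': error, 'status': status}

def oauth_scope(*scope_names):
    authenticator = StubAuthenticator(required_scope_names=scope_names)
    def scope_decorator(view_func):
        @wraps(view_func)
        def wrapper(request, *args, **kwargs):
            access_token, error_args = authenticator.validate(request)
            if not access_token:
                return authenticator.make_error_response(*error_args)
            return view_func(access_token, request, *args, **kwargs)
        return wrapper
    return scope_decorator

@oauth_scope('foo', 'bar')
def endpoint(access_token, request):
    return {'token': access_token}

print(endpoint({'token': 'abc'}))  # {'token': 'abc'}
print(endpoint({}))                # {'error': 'invalid_request', 'status': 400}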
7bc63a405e278cf5d1b7d7dac0df938dfd7b7583
|
lelei/parser.py
|
lelei/parser.py
|
import xml.etree.ElementTree as ET
import re
from sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
import xml.etree.ElementTree as ET
import re
from .sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
#some fields (e.g. float32) have a fixed size, so it's useless
# to set `bits=0` while defining such fields.
try:
#obviously, the KeyError is related to `bits` and not to `type`.
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
except KeyError:
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], 0)
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
Improve field parsing: fixed-size fields may not have `bits` attribute
|
Improve field parsing: fixed-size fields may not have `bits` attribute
Some fields whose type imposes a fixed size have no use for
a `bits` attribute.
For example, you cannot define a "bitfield float":
not only does it make no sense, it is also impossible to express
in a C struct or in the WSGD format.
|
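Editor's note: a minimal sketch of the fallback described above, using a two-entry toy SIZE_CHECKERS table; both checkers are assumptions standing in for lelei's real `sizes` module:

SIZE_CHECKERS = {
    # bitfield width comes from the document
    "uint": lambda read_bits: read_bits,
    # float32 ignores any requested width: it is always 32 bits
    "float32": lambda read_bits: 32,
}

def field_bits(attrib):
    try:
        return SIZE_CHECKERS[attrib["type"]](int(attrib["bits"]))
    except KeyError:  # no "bits" attribute: a fixed-size type
        return SIZE_CHECKERS[attrib["type"]](0)

print(field_bits({"type": "uint", "bits": "5"}))  # 5
print(field_bits({"type": "float32"}))            # 32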
Python
|
bsd-2-clause
|
alfateam123/lelei
|
import xml.etree.ElementTree as ET
import re
from sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
Improve field parsing: fixed-size fields may not have `bits` attribute
Some fields whose type imposes a fixed size have no use for
a `bits` attribute.
For example, you cannot define a "bitfield float":
not only does it make no sense, it is also impossible to express
in a C struct or in the WSGD format.
|
import xml.etree.ElementTree as ET
import re
from .sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
#some fields (e.g. float32) have a fixed size, so it's useless
# to set `bits=0` while defining such fields.
try:
#obviously, the KeyError is related to `bits` and not to `type`.
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
except KeyError:
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], 0)
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
<commit_before>import xml.etree.ElementTree as ET
import re
from sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast<commit_msg>Improve field parsing: fixed-size fields may not have `bits` attribute
Some fields whose type imposes a fixed size have no use for
a `bits` attribute.
For example, you cannot define a "bitfield float":
not only does it make no sense, it is also impossible to express
in a C struct or in the WSGD format.<commit_after>
|
import xml.etree.ElementTree as ET
import re
from .sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
#some fields (e.g. float32) have a fixed size, so it's useless
# to set `bits=0` while defining such fields.
try:
#obviously, the KeyError is related to `bits` and not to `type`.
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
except KeyError:
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], 0)
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
import xml.etree.ElementTree as ET
import re
from sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return astImprove field parsing: fixed-size fields may not have `bits` attribute
Some fields whose type imposes a fixed size have no use for
a `bits` attribute.
For example, you cannot define a "bitfield float":
not only does it make no sense, it is also not possible to define one in
a C struct or in WSGD format.import xml.etree.ElementTree as ET
import re
from .sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
    # some fields (e.g. float32) have a fixed size, so it's useless
    # to set `bits=0` when defining such fields.
    try:
        # a KeyError here can only come from `bits`, since `type` was read above.
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
except KeyError:
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], 0)
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
<commit_before>import xml.etree.ElementTree as ET
import re
from sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast<commit_msg>Improve field parsing: fixed-size fields may not have `bits` attribute
Some fields whose type imposes a fixed size have no use for
a `bits` attribute.
For example, you cannot define a "bitfield float":
not only does it make no sense, it is also not possible to define one in
a C struct or in WSGD format.<commit_after>import xml.etree.ElementTree as ET
import re
from .sizes import SIZE_CHECKERS
def _getroot(str_):
return ET.fromstring(str_)
def bitsForStructure(struct_type, read_bits):
try:
return SIZE_CHECKERS[struct_type](read_bits)
except KeyError:
raise ValueError("the given structure type {} does not exist".format(struct_type))
def structure_name(doc):
names = doc.findall("name")
assert len(names) == 1
return names[0].text
def parse_fields(doc):
doc_fields = doc.findall("fields/field")
assert len(doc_fields) > 0
fields = [parse_field(f_) for f_ in doc_fields]
return fields
def parse_field(field_doc):
field_ast = dict()
field_ast["name"] = field_doc.text
field_ast["type"] = field_doc.attrib["type"]
    # some fields (e.g. float32) have a fixed size, so it's useless
    # to set `bits=0` when defining such fields.
    try:
        # a KeyError here can only come from `bits`, since `type` was read above.
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], int(field_doc.attrib["bits"]))
except KeyError:
field_ast["bits"] = bitsForStructure(field_doc.attrib["type"], 0)
return field_ast
def build_ast(doc):
ast = dict()
ast["name"] = structure_name(doc)
ast["fields"] = parse_fields(doc)
return ast
def parse(str_):
root = _getroot(str_)
ast = build_ast(root)
return ast
|
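To see the effect of the guarded `bits` lookup above, here is a minimal runnable sketch. The SIZE_CHECKERS table below is a hypothetical stand-in for the real `sizes` module: fixed-size types ignore the requested bit count, so a field element may omit the `bits` attribute entirely.

import xml.etree.ElementTree as ET

# hypothetical stand-in for sizes.SIZE_CHECKERS: fixed-size types ignore the
# requested bit count, variable-size types echo it back
SIZE_CHECKERS = {
    'float32': lambda bits: 32,
    'uint': lambda bits: bits,
}

def parse_field(field_doc):
    field_ast = {'name': field_doc.text, 'type': field_doc.attrib['type']}
    try:
        # `type` was already read above, so a KeyError here means `bits` is absent
        field_ast['bits'] = SIZE_CHECKERS[field_ast['type']](int(field_doc.attrib['bits']))
    except KeyError:
        field_ast['bits'] = SIZE_CHECKERS[field_ast['type']](0)
    return field_ast

print(parse_field(ET.fromstring('<field type="float32">f</field>')))        # bits == 32, no attribute needed
print(parse_field(ET.fromstring('<field type="uint" bits="3">g</field>')))  # bits == 3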
6f61fbf2402cef5097e0cf6392a5ab39461ced60
|
metal/mmtl/task.py
|
metal/mmtl/task.py
|
from typing import Callable, List
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
input_name: The name of the input module to use
head_name: The name of the task head module to use
TODO: replace this with a more fully-featured path through the network
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_name: str,
head_name: str,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_name = input_name
self.head_name = head_name
self.data_loaders = data_loaders
self.scorers = scorers
|
from typing import Callable, List
import torch.nn as nn
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_module: nn.Module,
head_module: nn.Module,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_module = input_module
self.head_module = head_module
self.data_loaders = data_loaders
self.scorers = scorers
|
Update Task definition to include modules instead of module names
|
Update Task definition to include modules instead of module names
|
Python
|
apache-2.0
|
HazyResearch/metal,HazyResearch/metal
|
from typing import Callable, List
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
input_name: The name of the input module to use
head_name: The name of the task head module to use
TODO: replace this with a more fully-featured path through the network
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_name: str,
head_name: str,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_name = input_name
self.head_name = head_name
self.data_loaders = data_loaders
self.scorers = scorers
Update Task definition to include modules instead of module names
|
from typing import Callable, List
import torch.nn as nn
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_module: nn.Module,
head_module: nn.Module,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_module = input_module
self.head_module = head_module
self.data_loaders = data_loaders
self.scorers = scorers
|
<commit_before>from typing import Callable, List
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
input_name: The name of the input module to use
head_name: The name of the task head module to use
TODO: replace this with a more fully-featured path through the network
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_name: str,
head_name: str,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_name = input_name
self.head_name = head_name
self.data_loaders = data_loaders
self.scorers = scorers
<commit_msg>Update Task definition to include modules instead of module names<commit_after>
|
from typing import Callable, List
import torch.nn as nn
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_module: nn.Module,
head_module: nn.Module,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_module = input_module
self.head_module = head_module
self.data_loaders = data_loaders
self.scorers = scorers
|
from typing import Callable, List
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
input_name: The name of the input module to use
head_name: The name of the task head module to use
TODO: replace this with a more fully-featured path through the network
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_name: str,
head_name: str,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_name = input_name
self.head_name = head_name
self.data_loaders = data_loaders
self.scorers = scorers
Update Task definition to include modules instead of module namesfrom typing import Callable, List
import torch.nn as nn
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_module: nn.Module,
head_module: nn.Module,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_module = input_module
self.head_module = head_module
self.data_loaders = data_loaders
self.scorers = scorers
|
<commit_before>from typing import Callable, List
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
input_name: The name of the input module to use
head_name: The name of the task head module to use
TODO: replace this with a more fully-featured path through the network
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_name: str,
head_name: str,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_name = input_name
self.head_name = head_name
self.data_loaders = data_loaders
self.scorers = scorers
<commit_msg>Update Task definition to include modules instead of module names<commit_after>from typing import Callable, List
import torch.nn as nn
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
        data_loaders: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
input_module: nn.Module,
head_module: nn.Module,
data_loaders: List[DataLoader],
scorers: List[Callable] = None,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.input_module = input_module
self.head_module = head_module
self.data_loaders = data_loaders
self.scorers = scorers
|
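A sketch of how the updated signature is consumed, assuming PyTorch is installed and the Task class above is in scope: modules are now passed in directly instead of being resolved from names later. The dimensions and the task name are made up, and the three dummy loaders exist only to satisfy the length-3 [train, dev, test] check.

import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset

# three dummy loaders just to satisfy the [train, dev, test] length check
loaders = [
    DataLoader(TensorDataset(torch.zeros(4, 10), torch.zeros(4)))
    for _ in range(3)
]

task = Task(
    name="example_task",            # hypothetical task name
    input_module=nn.Linear(10, 8),  # an actual nn.Module, not a registry key
    head_module=nn.Linear(8, 1),
    data_loaders=loaders,
)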
6f37705efcebf9548705ee75f3814ccd1fd4cf60
|
ckanext/requestdata/emailer.py
|
ckanext/requestdata/emailer.py
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'Success' : True,
'Message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'Success': False,
            'Message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'success' : True,
'message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'success': False,
            'message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
Change 'success' and 'message' to lowercase
|
Change 'success' and 'message' to lowercase
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'Success' : True,
'Message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'Success': False,
            'Message' : 'An error occurred while sending the email. Try again.'
}
return error_dictChange 'success' and 'message' to lowercase
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'success' : True,
'message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'success': False,
            'message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
<commit_before>import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'Success' : True,
'Message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'Success': False,
            'Message' : 'An error occurred while sending the email. Try again.'
}
return error_dict<commit_msg>Change 'success' and 'message' to lowercase<commit_after>
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'success' : True,
'message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'success': False,
            'message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'Success' : True,
'Message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'Success': False,
            'Message' : 'An error occurred while sending the email. Try again.'
}
return error_dictChange 'success' and 'message' to lowercaseimport logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'success' : True,
'message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'success': False,
            'message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
<commit_before>import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'Success' : True,
'Message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'Success': False,
            'Message' : 'An error occurred while sending the email. Try again.'
}
return error_dict<commit_msg>Change 'success' and 'message' to lowercase<commit_after>import logging
import smtplib
from socket import error as socket_error
from email.mime.text import MIMEText
from pylons import config
log = logging.getLogger(__name__)
SMTP_SERVER = config.get('ckanext.requestdata.smtp.server', '')
SMTP_USER = config.get('ckanext.requestdata.smtp.user', '')
SMTP_PASSWORD = config.get('ckanext.requestdata.smtp.password', '')
def send_email(content, to, from_, subject):
    '''Sends an email.
    :param content: The body content for the mail.
    :type content: string
    :param to: The recipient(s) the mail will be sent to.
    :type to: string
    :param from_: The sender of the mail.
    :type from_: string
    :rtype: dict
    '''
msg = MIMEText(content,'plain','UTF-8')
if isinstance(to, basestring):
to = [to]
msg['Subject'] = subject
msg['From'] = from_
msg['To'] = ','.join(to)
try:
s = smtplib.SMTP(SMTP_SERVER)
s.login(SMTP_USER, SMTP_PASSWORD)
s.sendmail(from_, to, msg.as_string())
s.quit()
response_dict = {
'success' : True,
'message' : 'Email message was successfully sent.'
}
return response_dict
except socket_error:
log.critical('Could not connect to email server. Have you configured the SMTP settings?')
error_dict = {
'success': False,
            'message' : 'An error occurred while sending the email. Try again.'
}
return error_dict
|
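A sketch of a caller after this change: clients now branch on result['success'] and result['message'] (lowercase). The addresses are placeholders and the SMTP settings are assumed to be configured.

result = send_email(
    content='Your requested dataset is ready.',
    to=['requester@example.com'],  # placeholder address
    from_='noreply@example.com',   # placeholder address
    subject='Data request update',
)
if not result['success']:
    log.error(result['message'])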
8afb758e016e8dc3f4360195db2aa94c8693027b
|
client/tests/framework_test.py
|
client/tests/framework_test.py
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
def test_find_assignment(self):
self.assertRaises(Exception, ok.load_test_file, ['.', None])
self.assertTrue(ok.load_test_file('demo_assignments'))
self.assertTrue(ok.load_test_file('demo_assignments/hw1_tests.py'))
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
# TODO Before Merge: Update test script to run Python 3
# TODO Before Merge: Create tests for find_test_file, load_test_file, and get_src_paths
|
Remove old tests and add TODOs
|
Remove old tests and add TODOs
|
Python
|
apache-2.0
|
jackzhao-mj/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,jackzhao-mj/ok,jackzhao-mj/ok,jordonwii/ok,jordonwii/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
def test_find_assignment(self):
self.assertRaises(Exception, ok.load_test_file, ['.', None])
self.assertTrue(ok.load_test_file('demo_assignments'))
self.assertTrue(ok.load_test_file('demo_assignments/hw1_tests.py'))
Remove old tests and add TODOs
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
# TODO Before Merge: Update test script to run Python 3
# TODO Before Merge: Create tests for find_test_file, load_test_file, and get_src_paths
|
<commit_before>#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
def test_find_assignment(self):
self.assertRaises(Exception, ok.load_test_file, ['.', None])
self.assertTrue(ok.load_test_file('demo_assignments'))
self.assertTrue(ok.load_test_file('demo_assignments/hw1_tests.py'))
<commit_msg>Remove old tests and add TODOs<commit_after>
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
# TODO Before Merge: Update test script to run Python 3
# TODO Before Merge: Create tests for find_test_file, load_test_file, and get_src_paths
|
#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
def test_find_assignment(self):
self.assertRaises(Exception, ok.load_test_file, ['.', None])
self.assertTrue(ok.load_test_file('demo_assignments'))
self.assertTrue(ok.load_test_file('demo_assignments/hw1_tests.py'))
Remove old tests and add TODOs#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
# TODO Before Merge: Update test script to run Python 3
# TODO Before Merge: Create tests for find_test_file, load_test_file, and get_src_paths
|
<commit_before>#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
def test_find_assignment(self):
self.assertRaises(Exception, ok.load_test_file, ['.', None])
self.assertTrue(ok.load_test_file('demo_assignments'))
self.assertTrue(ok.load_test_file('demo_assignments/hw1_tests.py'))
<commit_msg>Remove old tests and add TODOs<commit_after>#!/usr/bin/python3
import unittest
import ok
import sys
class TestProtocol(ok.Protocol):
name = "test"
def __init__(self, args, src_files):
        ok.Protocol.__init__(self, args, src_files)
self.called_start = 0
self.called_interact = 0
def on_start(self):
self.called_start += 1
def on_interact(self):
self.called_interact += 1
class OkTest(unittest.TestCase):
def setUp(self):
self.hw1 = './demo_assignments/hw1.py'
self.hw1_tests = './demo_assignments/hw1_tests.py'
def test_parse_input(self):
old_sys_argv = sys.argv[1:]
sys.argv[1:] = []
_ = ok.parse_input() # Does not crash and returns a value.
sys.argv[1:] = old_sys_argv
def test_get_assignment(self):
self.assertIsNone(ok.get_assignment(self.hw1))
self.assertIsNot(ok.get_assignment(self.hw1_tests), None)
# TODO Before Merge: Update test script to run Python 3
# TODO Before Merge: Create tests for find_test_file, load_test_file, and get_src_paths
|
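One plausible shape for the tests promised in the TODOs, shown purely as a sketch: the commit deliberately leaves them unwritten, so the signatures of ok.find_test_file and ok.get_src_paths below are assumptions.

import unittest

import ok

class TodoTests(unittest.TestCase):
    # hypothetical tests for the TODOs above; the helper signatures are assumed
    def test_find_test_file(self):
        self.assertTrue(ok.find_test_file('demo_assignments').endswith('_tests.py'))

    def test_get_src_paths(self):
        self.assertIn('./demo_assignments/hw1.py', ok.get_src_paths('demo_assignments'))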
68b01ea3b6d70a991d3ca0f3e6bff08290caa292
|
packr/home/views.py
|
packr/home/views.py
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
print('angularhit')
return render_template('index.html')
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
return render_template('index.html')
|
Remove unnecessary 'angularhit' debug printout.
|
Remove unnecessary 'angularhit' debug printout.
|
Python
|
mit
|
KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
print('angularhit')
return render_template('index.html')
Remove unnecessary 'angularhit' debug printout.
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
return render_template('index.html')
|
<commit_before>from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
print('angularhit')
return render_template('index.html')
<commit_msg>Remove unnecessary 'angularhit' debug printout.<commit_after>
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
return render_template('index.html')
|
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
print('angularhit')
return render_template('index.html')
Remove unnecessary 'angularhit' debug printout.from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
return render_template('index.html')
|
<commit_before>from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
print('angularhit')
return render_template('index.html')
<commit_msg>Remove unnecessary 'angularhit' debug printout.<commit_after>from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
return render_template('index.html')
|
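If the trace is still wanted during development, Flask's logger is the idiomatic replacement for a bare print(): debug-level records disappear in production instead of cluttering stdout. A minimal sketch:

from flask import Blueprint, current_app, render_template

home = Blueprint('home', __name__)

@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def index(path):
    # unlike print(), this is dropped unless the app runs at DEBUG level
    current_app.logger.debug('catch-all route hit: %r', path)
    return render_template('index.html')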
cf995a27028abaca65ee23509277e8776665d70d
|
tests/fields/test_bytes.py
|
tests/fields/test_bytes.py
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67])
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes(bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]))
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
Update byte objects tests to reflect new behaviour.
|
Update byte objects tests to reflect new behaviour.
|
Python
|
mit
|
Pr0Ger/protobuf3
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67])
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
Update byte objects tests to reflect new behaviour.
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes(bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]))
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
<commit_before>from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67])
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
<commit_msg>Update byte objects tests to reflect new behaviour.<commit_after>
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes(bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]))
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67])
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
Update byte objects tests to reflect new behaviour.from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes(bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]))
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
<commit_before>from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67])
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
<commit_msg>Update byte objects tests to reflect new behaviour.<commit_after>from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message
from unittest import TestCase
class TestBytesField(TestCase):
def setUp(self):
class BytesTestMessage(Message):
b = BytesField(field_number=2)
self.msg_cls = BytesTestMessage
def test_get(self):
msg = self.msg_cls()
msg.parse_from_bytes(bytes([0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]))
self.assertEqual(msg.b, b'testing')
def test_default_get(self):
msg = self.msg_cls()
self.assertEqual(msg.b, b'')
def test_set(self):
msg = self.msg_cls()
msg.b = b'test'
self.assertEqual(msg.b, b'test')
def test_set_string(self):
msg = self.msg_cls()
msg.b = 'test'
self.assertEqual(msg.b, b'test')
def test_invalid_set(self):
msg = self.msg_cls()
def failure():
msg.b = 123
self.assertRaises(ValueError, failure)
|
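The behaviour change under test, spelled out: parse_from_bytes now expects a bytes-like object, and bytes(list_of_ints) is the cheapest way to adapt the old list literal. A runnable sketch, assuming protobuf3 is installed:

from protobuf3.fields.bytes import BytesField
from protobuf3.message import Message

class BytesTestMessage(Message):
    b = BytesField(field_number=2)

# wire format: 0x12 = field 2, wire type 2 (length-delimited); 0x07 = length 7
raw = [0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6E, 0x67]

msg = BytesTestMessage()
msg.parse_from_bytes(bytes(raw))  # a plain list of ints would no longer do
assert msg.b == b'testing'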
76162a98044f2a481e2ef34d32b7e8196e534b78
|
python/src/setup.py
|
python/src/setup.py
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
Create PyPI Release for 1.9.5.0.
|
Create PyPI Release for 1.9.5.0.
R=ozarov
DELTA=3 (0 added, 0 deleted, 3 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7045
|
Python
|
apache-2.0
|
aozarov/appengine-gcs-client,aozarov/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
Create PyPI Release for 1.9.5.0.
R=ozarov
DELTA=3 (0 added, 0 deleted, 3 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7045
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
<commit_before>"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
<commit_msg>Create PyPI Release for 1.9.5.0.
R=ozarov
DELTA=3 (0 added, 0 deleted, 3 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7045<commit_after>
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
Create PyPI Release for 1.9.5.0.
R=ozarov
DELTA=3 (0 added, 0 deleted, 3 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7045"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
<commit_before>"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
<commit_msg>Create PyPI Release for 1.9.5.0.
R=ozarov
DELTA=3 (0 added, 0 deleted, 3 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7045<commit_after>"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.5.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
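The diff in this record is a pure version bump (1.9.0.0 to 1.9.5.0) inside the setup() call. A release commit like this is easy to script; the sketch below rewrites the version="..." literal in place. The file name and version string are taken from the record, but the script itself is an editorial illustration and only handles the double-quoted form used here.

```python
import re

def bump_setup_version(path, new_version):
    """Rewrite the first version="..." literal in a setuptools setup() call."""
    with open(path) as source_file:
        source = source_file.read()
    # count=1 limits the substitution to the single version keyword,
    # mirroring the one-line change in the diff above.
    updated = re.sub(r'version="[^"]*"',
                     'version="{}"'.format(new_version),
                     source, count=1)
    with open(path, "w") as source_file:
        source_file.write(updated)

if __name__ == "__main__":
    bump_setup_version("setup.py", "1.9.5.0")
```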
356257d3a0db07548c2efe0694c2fb210900b38a
|
keystoneclient/exceptions.py
|
keystoneclient/exceptions.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
class CertificateConfigError(Exception):
"""Error reading the certificate"""
def __init__(self, output):
self.output = output
msg = ("Unable to load certificate. "
"Ensure your system is configured properly.")
super(CertificateConfigError, self).__init__(msg)
|
Migrate the keystone.common.cms to keystoneclient
|
Migrate the keystone.common.cms to keystoneclient
- Add checking of the openssl return code 2, related to the following review:
https://review.openstack.org/#/c/22716/
- Add support for setting the subprocess in the cms when we already know which
subprocess to use.
Closes-Bug: #1142574
Change-Id: I3f86e6ca8bb7738f57051ce7f0f5662b20e7a22b
|
Python
|
apache-2.0
|
citrix-openstack-build/keystoneauth,jamielennox/keystoneauth,sileht/keystoneauth
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
Migrate the keystone.common.cms to keystoneclient
- Add checking of the openssl return code 2, related to the following review:
https://review.openstack.org/#/c/22716/
- Add support for setting the subprocess in the cms when we already know which
subprocess to use.
Closes-Bug: #1142574
Change-Id: I3f86e6ca8bb7738f57051ce7f0f5662b20e7a22b
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
class CertificateConfigError(Exception):
"""Error reading the certificate"""
def __init__(self, output):
self.output = output
msg = ("Unable to load certificate. "
"Ensure your system is configured properly.")
super(CertificateConfigError, self).__init__(msg)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
<commit_msg>Migrate the keystone.common.cms to keystoneclient
- Add checking of the openssl return code 2, related to the following review:
https://review.openstack.org/#/c/22716/
- Add support for setting the subprocess in the cms when we already know which
subprocess to use.
Closes-Bug: #1142574
Change-Id: I3f86e6ca8bb7738f57051ce7f0f5662b20e7a22b<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
class CertificateConfigError(Exception):
"""Error reading the certificate"""
def __init__(self, output):
self.output = output
msg = ("Unable to load certificate. "
"Ensure your system is configured properly.")
super(CertificateConfigError, self).__init__(msg)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
Migrate the keystone.common.cms to keystoneclient
- Add checking of the openssl return code 2, related to the following review:
https://review.openstack.org/#/c/22716/
- Add support for setting the subprocess in the cms when we already know which
subprocess to use.
Closes-Bug: #1142574
Change-Id: I3f86e6ca8bb7738f57051ce7f0f5662b20e7a22b# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
class CertificateConfigError(Exception):
"""Error reading the certificate"""
def __init__(self, output):
self.output = output
msg = ("Unable to load certificate. "
"Ensure your system is configured properly.")
super(CertificateConfigError, self).__init__(msg)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
<commit_msg>Migrate the keystone.common.cms to keystoneclient
- Add checking of the openssl return code 2, related to the following review:
https://review.openstack.org/#/c/22716/
- Add support for setting the subprocess in the cms when we already know which
subprocess to use.
Closes-Bug: #1142574
Change-Id: I3f86e6ca8bb7738f57051ce7f0f5662b20e7a22b<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
#flake8: noqa
from keystoneclient.apiclient.exceptions import *
class CertificateConfigError(Exception):
"""Error reading the certificate"""
def __init__(self, output):
self.output = output
msg = ("Unable to load certificate. "
"Ensure your system is configured properly.")
super(CertificateConfigError, self).__init__(msg)
|
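The only code change in this record is the new CertificateConfigError, which keeps the raw low-level output on the exception while presenting a fixed user-facing message. A minimal sketch of how a caller might raise and inspect it follows; load_certificate and the certificate path are invented for illustration and are not part of keystoneclient.

```python
class CertificateConfigError(Exception):
    """Error reading the certificate"""
    def __init__(self, output):
        self.output = output
        msg = ("Unable to load certificate. "
               "Ensure your system is configured properly.")
        super(CertificateConfigError, self).__init__(msg)

def load_certificate(path):
    # Wrap the low-level failure, preserving its detail on .output.
    try:
        with open(path) as cert_file:
            return cert_file.read()
    except IOError as exc:
        raise CertificateConfigError(str(exc))

try:
    load_certificate("/etc/keystone/ssl/certs/signing_cert.pem")
except CertificateConfigError as err:
    print(err)         # fixed, user-facing message
    print(err.output)  # underlying detail for logs
```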
a9755fc4b30629ea2c9db51aa6d4218f99fcabc3
|
frigg/deployments/migrations/0004_auto_20150725_1456.py
|
frigg/deployments/migrations/0004_auto_20150725_1456.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
Set FRIGG_PREVIEW_IMAGE in db migrations
|
Set FRIGG_PREVIEW_IMAGE in db migrations
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
Set FRIGG_PREVIEW_IMAGE in db migrations
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
<commit_msg>Set FRIGG_PREVIEW_IMAGE in db migrations<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
Set FRIGG_PREVIEW_IMAGE in db migrations# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
<commit_msg>Set FRIGG_PREVIEW_IMAGE in db migrations<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
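The change swaps a hard-coded image name for settings.FRIGG_PREVIEW_IMAGE, so the migration's default follows deployment configuration. The standalone sketch below shows the same pattern outside a project; the configured value is an assumption, and note the trade-off that the recorded default now varies with whatever settings are active when the migration runs.

```python
import django
from django.conf import settings

# Minimal standalone configuration so the field can be built outside a project;
# the image name is an assumed example value.
settings.configure(FRIGG_PREVIEW_IMAGE='frigg/frigg-test-base')
django.setup()

from django.db import models

image = models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255)
print(image.get_default())  # -> 'frigg/frigg-test-base'
```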
5547f8a11192e9182b6d9aceef99249fc7b9d2cb
|
froide/publicbody/migrations/0007_auto_20171224_0744.py
|
froide/publicbody/migrations/0007_auto_20171224_0744.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
Fix pb migration by faking treebeard
|
Fix pb migration by faking treebeard
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
Fix pb migration by faking treebeard
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
<commit_msg>Fix pb migration by faking treebeard<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
Fix pb migration by faking treebeard# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
<commit_msg>Fix pb migration by faking treebeard<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-24 06:44
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
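The rewrite in this record exists because apps.get_model() returns a historical model without treebeard's add_root(), so the migration builds root nodes by hand: depth=1 plus a materialized path computed via the real model's _get_path(), with fix_tree() repairing any inconsistencies afterwards. As a rough sketch of what those paths look like, treebeard's default scheme encodes each tree step as a fixed-width base-36 key; the width and alphabet below match treebeard's defaults but are stated here as assumptions.

```python
def int2str36(n, width=4):
    """Encode n as a zero-padded base-36 key, as in treebeard's MP paths."""
    digits = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    out = ""
    while n:
        n, rem = divmod(n, 36)
        out = digits[rem] + out
    return out.rjust(width, "0")

# Roots live at depth 1; successive roots get successive one-step paths.
for step in range(1, 4):
    print(step, int2str36(step))  # 1 0001, 2 0002, 3 0003
```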
262a8fe3651a4ad368fd6594cba0669267c2d225
|
run_deploy_job_wr.py
|
run_deploy_job_wr.py
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
'artifacts/*.json',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
Add *.json to the list of artifacts backed up by Workspace Runner.
|
Add *.json to the list of artifacts backed up by Workspace Runner.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
Add *.json to the list of artifacts backed up by Workspace Runner.
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
'artifacts/*.json',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
<commit_msg>Add *.json to the list of artifacts backed up by Workspace Runner.<commit_after>
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
'artifacts/*.json',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
Add *.json to the list of artifacts backed up by Workspace Runner.#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
'artifacts/*.json',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
<commit_msg>Add *.json to the list of artifacts backed up by Workspace Runner.<commit_after>#!/usr/bin/env python
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
'artifacts/*.json',
]},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config, '-v',
])
if __name__ == '__main__':
main()
|
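The functional change here is one extra glob, 'artifacts/*.json', in the artifact manifest handed to workspace-run. The sketch below uses fnmatch as a stand-in to show which files the three patterns now pick up; the paths are invented examples, and fnmatch is only an approximation of whatever matching workspace-run actually performs.

```python
from fnmatch import fnmatch

patterns = ['artifacts/machine*/*log*', 'artifacts/*.jenv', 'artifacts/*.json']
candidates = [
    'artifacts/machine-0/cloud-init.log',
    'artifacts/local.jenv',
    'artifacts/results.json',   # newly captured by this commit
    'artifacts/scratch.txt',    # still ignored
]
for path in candidates:
    kept = any(fnmatch(path, pattern) for pattern in patterns)
    print(path, '->', 'kept' if kept else 'skipped')
```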
4ecc6184ce1a41680b011991afc3539d817f82ce
|
main.py
|
main.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
Change newline character to LF
|
Change newline character to LF
|
Python
|
apache-2.0
|
jiangzc/TiebaSign
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
Change newline character to LF
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
<commit_msg>Change newline character to LF<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
Change newline character to LF#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
<commit_msg>Change newline character to LF<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import Tool
import time
from Tieba import Tieba
def main():
print("Local Time:", time.asctime(time.localtime()))
# Read Cookies
cookies = Tool.load_cookies_path(".")
for cookie in cookies:
# Login
user = Tieba(cookie)
# List Likes
print(user.get_likes())
# Sign
print(user.username, "Signing")
for name in user.get_likes():
if user.sign_Wap(name):
time.sleep(10)
main()
|
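Old and new contents look identical in this record because the commit only converts CRLF line endings to LF, which this dump cannot display. A quick check like the one below (an editorial sketch, not project code) makes such a change visible.

```python
def newline_styles(path):
    """Count CRLF and bare-LF line endings in a file."""
    with open(path, 'rb') as f:
        data = f.read()
    crlf = data.count(b'\r\n')
    lf = data.count(b'\n') - crlf   # LF not preceded by CR
    return {'CRLF': crlf, 'LF': lf}

print(newline_styles('main.py'))  # expect {'CRLF': 0, 'LF': ...} after this commit
```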
dc8a4cbd6dfd873b0914e66b68a76a8520302643
|
main.py
|
main.py
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
city_country()
if __name__ == "__main__":
main()
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
if not os.path.isfile(CITY_STATE_FILE):
city_country()
cities = pd.read_csv(CITY_STATE_FILE)
if __name__ == "__main__":
main()
|
Make sure not to commit non-compiling code
|
Make sure not to commit non-compiling code
|
Python
|
mit
|
MichaelSheely/RegionPredictionFromTemperature
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
city_country()
if __name__ == "__main__":
main()
Make sure not to commit non-compiling code
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
if not os.path.isfile(CITY_STATE_FILE):
city_country()
cities = pd.read_csv(CITY_STATE_FILE)
if __name__ == "__main__":
main()
|
<commit_before>import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
city_country()
if __name__ == "__main__":
main()
<commit_msg>Make sure not to commit non-compiling code<commit_after>
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
if not os.path.isfile(CITY_STATE_FILE):
city_country()
cities = pd.read_csv(CITY_STATE_FILE)
if __name__ == "__main__":
main()
|
import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
city_country()
if __name__ == "__main__":
main()
Make sure not to commit non-compiling codeimport pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
if not os.path.isfile(CITY_STATE_FILE):
city_country()
cities = pd.read_csv(CITY_STATE_FILE)
if __name__ == "__main__":
main()
|
<commit_before>import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
city_country()
if __name__ == "__main__":
main()
<commit_msg>Make sure not to commit non-compiling code<commit_after>import pandas as pd
import os.path
TEMPERATURES_FILE = 'data/USCityTemperaturesAfter1850.csv'
CITY_STATE_FILE = 'data/city_state.csv'
def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
out = pd.read_csv(path, header=0)
us = out.loc[out['Country'] == 'United States']
us = us.loc[us['dt'] > 1850]
us.to_csv(TEMPERATURES_FILE)
return us
def city_country(raw_file='data/RawUSData.csv'):
out = pd.read_csv(raw_file)
keep = ['Name', 'Canonical Name']
us = out[keep]
us = us.assign(State = us['Canonical Name'].apply(get_state))
us = us.rename(columns={'Name':'City'})
us = us[['City', 'State']]
us.to_csv(CITY_STATE_FILE)
def get_state(raw_string):
return raw_string.split(',')[-2]
def main():
if not os.path.isfile(TEMPERATURES_FILE): # TODO: add force make file
load_data()
data = pd.read_csv(TEMPERATURES_FILE)
if not os.path.isfile(CITY_STATE_FILE):
city_country()
cities = pd.read_csv(CITY_STATE_FILE)
if __name__ == "__main__":
main()
|
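Two quirks in load_data() above are worth flagging: the ignore_before parameter is accepted but never used (the year 1850 is hard-coded), and us['dt'] > 1850 compares date strings against an integer, which Python 3 rejects outright. A hedged rework, under the assumption that the 'dt' column holds parseable dates:

```python
import pandas as pd

def load_data(path='data/GlobalLandTemperaturesbyCity.csv', ignore_before=1850):
    out = pd.read_csv(path, header=0, parse_dates=['dt'])
    us = out[out['Country'] == 'United States']
    # Filter on the actual year, honouring the ignore_before parameter.
    us = us[us['dt'].dt.year > ignore_before]
    return us
```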
b26a92d1e1480a73de4ce5ebe6ea4630fb3bfbc8
|
main.py
|
main.py
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
|
Add custom 500 error handler so app handler errors aren't suppressed
|
Add custom 500 error handler so app handler errors aren't suppressed
|
Python
|
apache-2.0
|
psykidellic/appengine-flask-skeleton,STEMgirlsChina/flask-tools,susnata1981/lendingclub,wink-app/wink,googlearchive/appengine-flask-skeleton,igorg1312/googlepythonsskeleton,lchans/ArcAudit,bruxr/Sirius2,waprin/appengine-flask-skeleton,jonparrott/flask-ferris-example,waprin/appengine-flask-skeleton,jsatch/twitclass,susnata1981/lendingclub,lchans/ArcAudit,igorg1312/googlepythonsskeleton,psykidellic/appengine-flask-skeleton,googlearchive/appengine-flask-skeleton,hefox/ttm,giantoak/memex-cluster-analysis,jholkeboer/tau-graphical-crawler,ThomasMarcel/tom-schneider-flask,klenwell/mushpup-demo,psykidellic/appengine-flask-skeleton,susnata1981/lendingclub,pwojt/beer_app_414,aaleotti-unimore/ComicsScraper,aaleotti-unimore/ComicsScraper,aaleotti-unimore/ComicsScraper,n8henrie/icw,ThomasMarcel/tom-schneider-flask,jonparrott/App-Engine-Flask-Restful-Example,psykidellic/appengine-flask-skeleton,bruxr/Sirius2,wink-app/wink,VipinDevineni/lendingclub,lchans/ArcAudit,kellielu/q,jholkeboer/tau-graphical-crawler,igorg1312/googlepythonsskeleton,kellielu/q,VipinDevineni/lendingclub,VipinDevineni/lendingclub,ThomasMarcel/tom-schneider-flask,wink-app/wink,wd15/rot13,thedataincubator/GAE-Timing,psykidellic/appengine-flask-skeleton,kellielu/q,wink-app/wink,wd15/wiki,jholkeboer/tau-graphical-crawler,hammertoe/didactic-spork,hammertoe/didactic-spork,HeewonLee/asekfubweibfuisdbf,n8henrie/icw,djw8605/GratiaChromeShareApp,googlearchive/appengine-flask-skeleton,hefox/ttm,STEMgirlsChina/flask-tools,ashishthedev/appengine-python-flask-skeleton,hammertoe/didactic-spork,sin111014/asdfasdf,igorg1312/googlepythonsskeleton,googlearchive/appengine-flask-skeleton,welyjesch/gae-flask,hammertoe/didactic-spork,ThomasMarcel/tom-schneider-flask,mnrozhkov/appengine-python-flask-skeleton,jonparrott/flask-ferris-example,n8henrie/icw,jholkeboer/tau-graphical-crawler,n8henrie/icw,jholkeboer/tau-graphical-crawler,welyjesch/gae-flask,n8henrie/icw,klenwell/mushpup-demo,waprin/appengine-flask-skeleton,klenwell/mushpup-demo,bruxr/Sirius2,STEMgirlsChina/flask-tools,waprin/appengine-flask-skeleton,aaleotti-unimore/ComicsScraper,kellielu/q,aaleotti-unimore/ComicsScraper
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
Add custom 500 error handler so app handler errors aren't suppressed
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
|
<commit_before>"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
<commit_msg>Add custom 500 error handler so app handler errors aren't suppressed<commit_after>
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
|
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
Add custom 500 error handler so app handler errors aren't suppressed"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
|
<commit_before>"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
<commit_msg>Add custom 500 error handler so app handler errors aren't suppressed<commit_after>"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
|
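One wrinkle in the new contents: both error handlers are named page_not_found, so the second definition shadows the first at module level (flake8 flags this as F811). Flask registers each function at decoration time, so the app behaves correctly regardless, but distinct names read better; a sketch:

```python
from flask import Flask

app = Flask(__name__)

@app.errorhandler(404)
def page_not_found(e):
    """Return a custom 404 error."""
    return 'Sorry, Nothing at this URL.', 404

@app.errorhandler(500)
def server_error(e):
    """Return a custom 500 error."""
    return 'Sorry, unexpected error: {}'.format(e), 500
```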