commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a1de1f26d30a753c7e3ea66a600b04fb130b02ec | hellomama_registration/testsettings.py | hellomama_registration/testsettings.py | from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
| from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
| Use MD5 password hasher for test runs | Use MD5 password hasher for test runs
| Python | bsd-3-clause | praekelt/hellomama-registration,praekelt/hellomama-registration | from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
Use MD5 password hasher for test runs | from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
| <commit_before>from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
<commit_msg>Use MD5 password hasher for test runs<commit_after> | from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
| from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
Use MD5 password hasher for test runsfrom hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
| <commit_before>from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
<commit_msg>Use MD5 password hasher for test runs<commit_after>from hellomama_registration.settings import * # flake8: noqa
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'TESTSEKRET'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
METRICS_URL = "http://metrics-url"
METRICS_AUTH_TOKEN = "REPLACEME"
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
|
f7a9391c22a1b69bcd645a3efbe41f32e91e668c | hb_res/resources/ExplanationStorage.py | hb_res/resources/ExplanationStorage.py | __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
return NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError | __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
raise NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError
| Fix typo with return instead of raise | Fix typo with return instead of raise
| Python | mit | hatbot-team/hatbot_resources | __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
return NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedErrorFix typo with return instead of raise | __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
raise NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError
| <commit_before>__author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
return NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError<commit_msg>Fix typo with return instead of raise<commit_after> | __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
raise NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError
| __author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
return NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedErrorFix typo with return instead of raise__author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
raise NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError
| <commit_before>__author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
return NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError<commit_msg>Fix typo with return instead of raise<commit_after>__author__ = 'skird'
class ExplanationStorage:
"""
Interface of abstract readable/writeable resource
Every resource is a map from string to list of strings
It supports random access and provides iterator on its elements
"""
def entries(self):
raise NotImplementedError
def add_entry(self, entry) -> None:
raise NotImplementedError
def clear(self) -> None:
raise NotImplementedError
def __getitem__(self, item):
raise NotImplementedError
|
6629a3a238432522d77f840b465eb99a3745593f | django_base64field/tests/models.py | django_base64field/tests/models.py | from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
| from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
# Making `ek` unique just because it will be used as `FK`
# in other models.
ek = Base64Field(unique=True)
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
| Add little bit comments for Planet model | Add little bit comments for Planet model
| Python | bsd-3-clause | Alir3z4/django-base64field | from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
Add little bit comments for Planet model | from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
# Making `ek` unique just because it will be used as `FK`
# in other models.
ek = Base64Field(unique=True)
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
| <commit_before>from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
<commit_msg>Add little bit comments for Planet model<commit_after> | from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
# Making `ek` unique just because it will be used as `FK`
# in other models.
ek = Base64Field(unique=True)
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
| from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
Add little bit comments for Planet modelfrom django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
# Making `ek` unique just because it will be used as `FK`
# in other models.
ek = Base64Field(unique=True)
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
| <commit_before>from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
<commit_msg>Add little bit comments for Planet model<commit_after>from django.db import models
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
# Making `ek` unique just because it will be used as `FK`
# in other models.
ek = Base64Field(unique=True)
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class Helper(models.Model):
"""
base64 encoded value won't be available at first time creation.
It can ve accessible by getting the object from database after creation
mean when it get saved completely, But what if we don't want to get our base64
encoded key from our sweet model by retrieving it again from database?
It's easy, efficient, holly and molly!
"""
ek = Base64Field()
def _ek(self):
if self.ek: return self.ek
if not self.ek and self.pk:
return base64.encode(self.pk)
return self.ek
class CustomReceiver(models.Model):
"""
Passing custom receiver to generate `youyouid` with a custom receiver.
"""
youyouid = Base64Field(
encode_receiver='django_base64field.tests.receivers:custom_receiver'
)
|
084eac5735404edeed62cee4e2b429c8f4f2a7a5 | app/dao/inbound_numbers_dao.py | app/dao/inbound_numbers_dao.py | from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
| from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
| Update dao to order by updated_at, number | Update dao to order by updated_at, number
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
Update dao to order by updated_at, number | from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
| <commit_before>from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
<commit_msg>Update dao to order by updated_at, number<commit_after> | from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
| from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
Update dao to order by updated_at, numberfrom app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
| <commit_before>from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
<commit_msg>Update dao to order by updated_at, number<commit_after>from app import db
from app.dao.dao_utils import transactional
from app.models import InboundNumber
def dao_get_inbound_numbers():
return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all()
def dao_get_available_inbound_numbers():
return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all()
def dao_get_inbound_number_for_service(service_id):
return InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
def dao_get_inbound_number(inbound_number_id):
return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first()
@transactional
def dao_set_inbound_number_to_service(service_id, inbound_number):
inbound_number.service_id = service_id
db.session.add(inbound_number)
@transactional
def dao_set_inbound_number_active_flag(service_id, active):
inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first()
inbound_number.active = active
db.session.add(inbound_number)
|
e8ebb4e9be78e32bc59b1f03cd4854add1148de3 | extra.py | extra.py | Import('env')
env.Append(CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
| Import('env')
env.Append(CFLAGS=["-std=gnu11"], CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
| Make C11 default. pio should actually do this. | Make C11 default. pio should actually do this.
Signed-off-by: David Graeff <cddf4d21ffd604ccd3fdffe1267f1f9d72e179ea@web.de>
| Python | mit | Openhab-Nodes/libWakaamaEmb,Openhab-Nodes/libWakaamaEmb,Openhab-Nodes/libWakaamaEmb,Openhab-Nodes/libWakaamaEmb | Import('env')
env.Append(CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
Make C11 default. pio should actually do this.
Signed-off-by: David Graeff <cddf4d21ffd604ccd3fdffe1267f1f9d72e179ea@web.de> | Import('env')
env.Append(CFLAGS=["-std=gnu11"], CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
| <commit_before>Import('env')
env.Append(CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
<commit_msg>Make C11 default. pio should actually do this.
Signed-off-by: David Graeff <cddf4d21ffd604ccd3fdffe1267f1f9d72e179ea@web.de><commit_after> | Import('env')
env.Append(CFLAGS=["-std=gnu11"], CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
| Import('env')
env.Append(CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
Make C11 default. pio should actually do this.
Signed-off-by: David Graeff <cddf4d21ffd604ccd3fdffe1267f1f9d72e179ea@web.de>Import('env')
env.Append(CFLAGS=["-std=gnu11"], CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
| <commit_before>Import('env')
env.Append(CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
<commit_msg>Make C11 default. pio should actually do this.
Signed-off-by: David Graeff <cddf4d21ffd604ccd3fdffe1267f1f9d72e179ea@web.de><commit_after>Import('env')
env.Append(CFLAGS=["-std=gnu11"], CXXFLAGS=["-std=c++11"], CPPPATH=["src/wakaama", "wakaama"])
|
ec867f87441af657f0138ec723de4a52299284d8 | src/epiweb/apps/survey/urls.py | src/epiweb/apps/survey/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^profile/$', 'epiweb.apps.survey.views.profile_index'),
(r'^thanks/$', 'epiweb.apps.survey.views.thanks'),
url(r'^people/$', 'epiweb.apps.survey.views.people', name='survey_people'),
url(r'^people/add/$', 'epiweb.apps.survey.views.people_add', name='survey_people_add'),
(r'^$', 'epiweb.apps.survey.views.index'),
)
| from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
(r'^profile/$', views.profile_index),
(r'^thanks/$', views.thanks),
url(r'^people/$', views.people, name='survey_people'),
url(r'^people/add/$', views.people_add, name='survey_people_add'),
(r'^$', views.index),
)
| Use view's function instead of its name | Use view's function instead of its name
| Python | agpl-3.0 | ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website | from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^profile/$', 'epiweb.apps.survey.views.profile_index'),
(r'^thanks/$', 'epiweb.apps.survey.views.thanks'),
url(r'^people/$', 'epiweb.apps.survey.views.people', name='survey_people'),
url(r'^people/add/$', 'epiweb.apps.survey.views.people_add', name='survey_people_add'),
(r'^$', 'epiweb.apps.survey.views.index'),
)
Use view's function instead of its name | from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
(r'^profile/$', views.profile_index),
(r'^thanks/$', views.thanks),
url(r'^people/$', views.people, name='survey_people'),
url(r'^people/add/$', views.people_add, name='survey_people_add'),
(r'^$', views.index),
)
| <commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^profile/$', 'epiweb.apps.survey.views.profile_index'),
(r'^thanks/$', 'epiweb.apps.survey.views.thanks'),
url(r'^people/$', 'epiweb.apps.survey.views.people', name='survey_people'),
url(r'^people/add/$', 'epiweb.apps.survey.views.people_add', name='survey_people_add'),
(r'^$', 'epiweb.apps.survey.views.index'),
)
<commit_msg>Use view's function instead of its name<commit_after> | from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
(r'^profile/$', views.profile_index),
(r'^thanks/$', views.thanks),
url(r'^people/$', views.people, name='survey_people'),
url(r'^people/add/$', views.people_add, name='survey_people_add'),
(r'^$', views.index),
)
| from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^profile/$', 'epiweb.apps.survey.views.profile_index'),
(r'^thanks/$', 'epiweb.apps.survey.views.thanks'),
url(r'^people/$', 'epiweb.apps.survey.views.people', name='survey_people'),
url(r'^people/add/$', 'epiweb.apps.survey.views.people_add', name='survey_people_add'),
(r'^$', 'epiweb.apps.survey.views.index'),
)
Use view's function instead of its namefrom django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
(r'^profile/$', views.profile_index),
(r'^thanks/$', views.thanks),
url(r'^people/$', views.people, name='survey_people'),
url(r'^people/add/$', views.people_add, name='survey_people_add'),
(r'^$', views.index),
)
| <commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^profile/$', 'epiweb.apps.survey.views.profile_index'),
(r'^thanks/$', 'epiweb.apps.survey.views.thanks'),
url(r'^people/$', 'epiweb.apps.survey.views.people', name='survey_people'),
url(r'^people/add/$', 'epiweb.apps.survey.views.people_add', name='survey_people_add'),
(r'^$', 'epiweb.apps.survey.views.index'),
)
<commit_msg>Use view's function instead of its name<commit_after>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
(r'^profile/$', views.profile_index),
(r'^thanks/$', views.thanks),
url(r'^people/$', views.people, name='survey_people'),
url(r'^people/add/$', views.people_add, name='survey_people_add'),
(r'^$', views.index),
)
|
e85e5ea6e2a8b188ff79d114ae0546c8d3ca4c73 | examples/MNIST/mnist.py | examples/MNIST/mnist.py | import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
print('Downloading data from %s' % origin)
urlretrieve(origin, data_file)
print('... loading data')
f = gzip.open(data_file, 'rb')
if sys.version_info[0] == 3:
train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
else:
train_set, valid_set, test_set = pickle.load(f)
f.close()
train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set
return (train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)
| import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
print('Downloading data from {}'.format(origin))
urlretrieve(origin, data_file)
print('... loading data')
with gzip.open(data_file, 'rb') as f:
if sys.version_info[0] == 3:
return pickle.load(f, encoding='latin1')
else:
return pickle.load(f)
| Simplify code loading MNIST dataset. | Simplify code loading MNIST dataset.
| Python | mit | VisualComputingInstitute/Beacon8,lucasb-eyer/DeepFried2,elPistolero/DeepFried2,yobibyte/DeepFried2,Pandoro/DeepFried2 | import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
print('Downloading data from %s' % origin)
urlretrieve(origin, data_file)
print('... loading data')
f = gzip.open(data_file, 'rb')
if sys.version_info[0] == 3:
train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
else:
train_set, valid_set, test_set = pickle.load(f)
f.close()
train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set
return (train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)
Simplify code loading MNIST dataset. | import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
print('Downloading data from {}'.format(origin))
urlretrieve(origin, data_file)
print('... loading data')
with gzip.open(data_file, 'rb') as f:
if sys.version_info[0] == 3:
return pickle.load(f, encoding='latin1')
else:
return pickle.load(f)
| <commit_before>import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
print('Downloading data from %s' % origin)
urlretrieve(origin, data_file)
print('... loading data')
f = gzip.open(data_file, 'rb')
if sys.version_info[0] == 3:
train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
else:
train_set, valid_set, test_set = pickle.load(f)
f.close()
train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set
return (train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)
<commit_msg>Simplify code loading MNIST dataset.<commit_after> | import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
print('Downloading data from {}'.format(origin))
urlretrieve(origin, data_file)
print('... loading data')
with gzip.open(data_file, 'rb') as f:
if sys.version_info[0] == 3:
return pickle.load(f, encoding='latin1')
else:
return pickle.load(f)
| import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
print('Downloading data from %s' % origin)
urlretrieve(origin, data_file)
print('... loading data')
f = gzip.open(data_file, 'rb')
if sys.version_info[0] == 3:
train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
else:
train_set, valid_set, test_set = pickle.load(f)
f.close()
train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set
return (train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)
Simplify code loading MNIST dataset.import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
print('Downloading data from {}'.format(origin))
urlretrieve(origin, data_file)
print('... loading data')
with gzip.open(data_file, 'rb') as f:
if sys.version_info[0] == 3:
return pickle.load(f, encoding='latin1')
else:
return pickle.load(f)
| <commit_before>import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
print('Downloading data from %s' % origin)
urlretrieve(origin, data_file)
print('... loading data')
f = gzip.open(data_file, 'rb')
if sys.version_info[0] == 3:
train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
else:
train_set, valid_set, test_set = pickle.load(f)
f.close()
train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set
return (train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)
<commit_msg>Simplify code loading MNIST dataset.<commit_after>import os
import gzip
import pickle
import sys
# Python 2/3 compatibility.
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
'''Adapted from theano tutorial'''
def load_mnist(data_file = os.path.join(os.path.dirname(__file__), 'mnist.pkl.gz')):
if not os.path.exists(data_file):
origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
print('Downloading data from {}'.format(origin))
urlretrieve(origin, data_file)
print('... loading data')
with gzip.open(data_file, 'rb') as f:
if sys.version_info[0] == 3:
return pickle.load(f, encoding='latin1')
else:
return pickle.load(f)
|
6ce0b5fabd3573ac3c3feb30e8fb48af16d2504f | apps/users/tests/test_profile_admin.py | apps/users/tests/test_profile_admin.py | import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
response = self.client.post('/profile/edit/', post_data, follow=True)
self.assertRedirects(response, redirect, status_code=301)
| from contextlib import contextmanager
import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
@contextmanager
def given_user(fake_auth, user):
"""Context manager to respond to any login call with a specific user."""
fake_auth.expects_call().returns(user)
yield
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
with given_user(fake, self.User):
self.client.login()
response = self.client.post('/profile/edit/', post_data,
follow=True)
try:
self.assertRedirects(response, redirect, status_code=301)
except AssertionError:
print response.redirect_chain
raise
| Fix the profile test to log in as the test user. | Fix the profile test to log in as the test user. | Python | bsd-3-clause | mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite | import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
response = self.client.post('/profile/edit/', post_data, follow=True)
self.assertRedirects(response, redirect, status_code=301)
Fix the profile test to log in as the test user. | from contextlib import contextmanager
import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
@contextmanager
def given_user(fake_auth, user):
"""Context manager to respond to any login call with a specific user."""
fake_auth.expects_call().returns(user)
yield
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
with given_user(fake, self.User):
self.client.login()
response = self.client.post('/profile/edit/', post_data,
follow=True)
try:
self.assertRedirects(response, redirect, status_code=301)
except AssertionError:
print response.redirect_chain
raise
| <commit_before>import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
response = self.client.post('/profile/edit/', post_data, follow=True)
self.assertRedirects(response, redirect, status_code=301)
<commit_msg>Fix the profile test to log in as the test user.<commit_after> | from contextlib import contextmanager
import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
@contextmanager
def given_user(fake_auth, user):
"""Context manager to respond to any login call with a specific user."""
fake_auth.expects_call().returns(user)
yield
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
with given_user(fake, self.User):
self.client.login()
response = self.client.post('/profile/edit/', post_data,
follow=True)
try:
self.assertRedirects(response, redirect, status_code=301)
except AssertionError:
print response.redirect_chain
raise
| import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
response = self.client.post('/profile/edit/', post_data, follow=True)
self.assertRedirects(response, redirect, status_code=301)
Fix the profile test to log in as the test user.from contextlib import contextmanager
import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
@contextmanager
def given_user(fake_auth, user):
"""Context manager to respond to any login call with a specific user."""
fake_auth.expects_call().returns(user)
yield
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
with given_user(fake, self.User):
self.client.login()
response = self.client.post('/profile/edit/', post_data,
follow=True)
try:
self.assertRedirects(response, redirect, status_code=301)
except AssertionError:
print response.redirect_chain
raise
| <commit_before>import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
response = self.client.post('/profile/edit/', post_data, follow=True)
self.assertRedirects(response, redirect, status_code=301)
<commit_msg>Fix the profile test to log in as the test user.<commit_after>from contextlib import contextmanager
import fudge
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from users.models import Profile, Link
@contextmanager
def given_user(fake_auth, user):
"""Context manager to respond to any login call with a specific user."""
fake_auth.expects_call().returns(user)
yield
class ProfileAdmin(TestCase):
def setUp(self):
self.client = Client()
self.User = User.objects.create(
username=u'test_ross',
password=u'password2',
is_active=True
)
self.profile = Profile.objects.create(
user=self.User
)
@fudge.patch('django_browserid.auth.BrowserIDBackend.authenticate')
def test_edit_without_links(self, fake):
redirect = '/profile/%s/' % self.User.username
post_data = {
'name': 'Boozeniges',
'link_url': 'http://ross-eats.co.uk',
'link_name': 'ross eats'
}
with given_user(fake, self.User):
self.client.login()
response = self.client.post('/profile/edit/', post_data,
follow=True)
try:
self.assertRedirects(response, redirect, status_code=301)
except AssertionError:
print response.redirect_chain
raise
|
bcfb29272d727ee2775c9f212053725e4a562752 | pip_review/__init__.py | pip_review/__init__.py | from functools import partial
import subprocess
import requests
import multiprocessing
import json
def get_pkg_info(pkg_name, session):
r = session.get('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
if r.status_code == requests.codes.ok:
return json.loads(r.text)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, session, silent=False):
try:
info = get_pkg_info(pkg_name, session)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
with requests.session() as session:
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, session=session, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
| from functools import partial
import subprocess
import urllib2
import multiprocessing
import json
def get_pkg_info(pkg_name):
req = urllib2.Request('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
handler = urllib2.urlopen(req)
status = handler.getcode()
if status == 200:
content = handler.read()
return json.loads(content)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, silent=False):
try:
info = get_pkg_info(pkg_name)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
| Remove the requests dependency altogether. | Remove the requests dependency altogether.
(Makes no sense for such small a tool.)
| Python | bsd-2-clause | suutari-ai/prequ,suutari/prequ,suutari/prequ | from functools import partial
import subprocess
import requests
import multiprocessing
import json
def get_pkg_info(pkg_name, session):
r = session.get('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
if r.status_code == requests.codes.ok:
return json.loads(r.text)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, session, silent=False):
try:
info = get_pkg_info(pkg_name, session)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
with requests.session() as session:
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, session=session, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
Remove the requests dependency altogether.
(Makes no sense for such small a tool.) | from functools import partial
import subprocess
import urllib2
import multiprocessing
import json
def get_pkg_info(pkg_name):
req = urllib2.Request('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
handler = urllib2.urlopen(req)
status = handler.getcode()
if status == 200:
content = handler.read()
return json.loads(content)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, silent=False):
try:
info = get_pkg_info(pkg_name)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
| <commit_before>from functools import partial
import subprocess
import requests
import multiprocessing
import json
def get_pkg_info(pkg_name, session):
r = session.get('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
if r.status_code == requests.codes.ok:
return json.loads(r.text)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, session, silent=False):
try:
info = get_pkg_info(pkg_name, session)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
with requests.session() as session:
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, session=session, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
<commit_msg>Remove the requests dependency altogether.
(Makes no sense for such small a tool.)<commit_after> | from functools import partial
import subprocess
import urllib2
import multiprocessing
import json
def get_pkg_info(pkg_name):
req = urllib2.Request('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
handler = urllib2.urlopen(req)
status = handler.getcode()
if status == 200:
content = handler.read()
return json.loads(content)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, silent=False):
    """Return the newest release string for *pkg_name* on PyPI.

    When *silent* is true, an unknown package yields None instead of
    propagating the ValueError raised by get_pkg_info().
    """
    try:
        metadata = get_pkg_info(pkg_name)
    except ValueError:
        if not silent:
            raise
        return None
    return metadata['info']['version']
def get_latest_versions(pkg_names):
    """Look up the latest PyPI version for every name in *pkg_names*.

    Returns an iterable of (name, version) pairs; version is None for
    packages that could not be found.  Lookups run in a worker pool of
    at most 12 processes.
    """
    if not pkg_names:
        # multiprocessing.Pool(0) raises ValueError, so short-circuit
        # the empty case instead of creating a pool.
        return zip([], [])
    pool = multiprocessing.Pool(min(12, len(pkg_names)))
    try:
        get_latest = partial(latest_version, silent=True)
        versions = pool.map(get_latest, pkg_names)
    finally:
        # Shut the worker processes down instead of leaking them.
        pool.close()
        pool.join()
    return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
    """Yield (name, version, is_editable) for each ``pip freeze`` entry.

    Editable ("-e ...") installs are reported with version None, and only
    when *editables* is true; regular "name==version" lines are always
    yielded.  NOTE(review): appears to target Python 2 -- on Python 3
    check_output() returns bytes, so splitting on a str newline would
    raise TypeError; confirm before reuse.
    """
    for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
        if not line:
            continue  # skip the trailing blank line of the output
        if line.startswith('-e'):
            if editables:
                # Editable installs carry no pinned version.
                yield line.split('#egg=', 1)[1], None, True
        else:
            name, version = line.split('==')
            yield name, version, False
| from functools import partial
import subprocess
import requests
import multiprocessing
import json
def get_pkg_info(pkg_name, session):
r = session.get('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
if r.status_code == requests.codes.ok:
return json.loads(r.text)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, session, silent=False):
try:
info = get_pkg_info(pkg_name, session)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
with requests.session() as session:
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, session=session, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
Remove the requests dependency altogether.
(Makes no sense for such a small tool.)from functools import partial
import subprocess
import urllib2
import multiprocessing
import json
def get_pkg_info(pkg_name):
req = urllib2.Request('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
handler = urllib2.urlopen(req)
status = handler.getcode()
if status == 200:
content = handler.read()
return json.loads(content)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, silent=False):
try:
info = get_pkg_info(pkg_name)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
| <commit_before>from functools import partial
import subprocess
import requests
import multiprocessing
import json
def get_pkg_info(pkg_name, session):
r = session.get('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
if r.status_code == requests.codes.ok:
return json.loads(r.text)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, session, silent=False):
try:
info = get_pkg_info(pkg_name, session)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
with requests.session() as session:
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, session=session, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
<commit_msg>Remove the requests dependency altogether.
(Makes no sense for such a small tool.)<commit_after>from functools import partial
import subprocess
import urllib2
import multiprocessing
import json
def get_pkg_info(pkg_name):
req = urllib2.Request('http://pypi.python.org/pypi/%s/json' % (pkg_name,))
handler = urllib2.urlopen(req)
status = handler.getcode()
if status == 200:
content = handler.read()
return json.loads(content)
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
def latest_version(pkg_name, silent=False):
try:
info = get_pkg_info(pkg_name)
except ValueError:
if silent:
return None
else:
raise
return info['info']['version']
def get_latest_versions(pkg_names):
pool = multiprocessing.Pool(min(12, len(pkg_names)))
get_latest = partial(latest_version, silent=True)
versions = pool.map(get_latest, pkg_names)
return zip(pkg_names, versions)
def get_installed_pkgs(editables=False):
for line in subprocess.check_output(['pip', 'freeze']).split('\n'):
if not line:
continue
if line.startswith('-e'):
if editables:
yield line.split('#egg=', 1)[1], None, True
else:
name, version = line.split('==')
yield name, version, False
|
f9f12e89e526b2645f013fc5856488d105a39d5a | bouncer/sentry.py | bouncer/sentry.py | """Report exceptions to Sentry/Raven."""
from pyramid import tweens
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
config.registry["raven.client"] = raven.Client(
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
| """Report exceptions to Sentry/Raven."""
import os
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
environment = os.environ.get('ENV', 'dev')
config.registry["raven.client"] = raven.Client(environment=environment,
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
| Add environment name to Sentry client | Add environment name to Sentry client
Skyliner provides the ENV environment variable for our application,
which contains either "prod" or "qa" depending on the environment.
Sentry supports partitioning reports based on the environment within a
single application. Adding this metadata to the `Client` allows us to
migrate QA and Prod onto a single application in Sentry.
| Python | bsd-2-clause | hypothesis/bouncer,hypothesis/bouncer,hypothesis/bouncer | """Report exceptions to Sentry/Raven."""
from pyramid import tweens
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
config.registry["raven.client"] = raven.Client(
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
Add environment name to Sentry client
Skyliner provides the ENV environment variable for our application,
which contains either "prod" or "qa" depending on the environment.
Sentry supports partitioning reports based on the environment within a
single application. Adding this metadata to the `Client` allows us to
migrate QA and Prod onto a single application in Sentry. | """Report exceptions to Sentry/Raven."""
import os
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
environment = os.environ.get('ENV', 'dev')
config.registry["raven.client"] = raven.Client(environment=environment,
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
| <commit_before>"""Report exceptions to Sentry/Raven."""
from pyramid import tweens
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
config.registry["raven.client"] = raven.Client(
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
<commit_msg>Add environment name to Sentry client
Skyliner provides the ENV environment variable for our application,
which contains either "prod" or "qa" depending on the environment.
Sentry supports partitioning reports based on the environment within a
single application. Adding this metadata to the `Client` allows us to
migrate QA and Prod onto a single application in Sentry.<commit_after> | """Report exceptions to Sentry/Raven."""
import os
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
    """Pyramid include hook: configure Sentry/Raven error reporting.

    Creates a single raven.Client tagged with the deployment environment
    (from the ENV variable, defaulting to "dev") and the application
    release, stores it in the registry, and exposes it as the cached
    ``request.raven`` attribute.
    """
    environment = os.environ.get('ENV', 'dev')
    config.registry["raven.client"] = raven.Client(environment=environment,
        release=__version__)
    # reify=True caches the client on the request after first access.
    config.add_request_method(
        get_raven_client,
        name="raven",
        reify=True)
| """Report exceptions to Sentry/Raven."""
from pyramid import tweens
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
config.registry["raven.client"] = raven.Client(
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
Add environment name to Sentry client
Skyliner provides the ENV environment variable for our application,
which contains either "prod" or "qa" depending on the environment.
Sentry supports partitioning reports based on the environment within a
single application. Adding this metadata to the `Client` allows us to
migrate QA and Prod onto a single application in Sentry."""Report exceptions to Sentry/Raven."""
import os
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
environment = os.environ.get('ENV', 'dev')
config.registry["raven.client"] = raven.Client(environment=environment,
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
| <commit_before>"""Report exceptions to Sentry/Raven."""
from pyramid import tweens
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
config.registry["raven.client"] = raven.Client(
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
<commit_msg>Add environment name to Sentry client
Skyliner provides the ENV environment variable for our application,
which contains either "prod" or "qa" depending on the environment.
Sentry supports partitioning reports based on the environment within a
single application. Adding this metadata to the `Client` allows us to
migrate QA and Prod onto a single application in Sentry.<commit_after>"""Report exceptions to Sentry/Raven."""
import os
import raven
from bouncer import __version__
def get_raven_client(request):
"""Return the Raven client for reporting crashes to Sentry."""
client = request.registry["raven.client"]
client.http_context({
"url": request.url,
"method": request.method,
})
request.add_finished_callback(
lambda request: client.context.clear())
return client
def includeme(config):
environment = os.environ.get('ENV', 'dev')
config.registry["raven.client"] = raven.Client(environment=environment,
release=__version__)
config.add_request_method(
get_raven_client,
name="raven",
reify=True)
|
79cb9edf45ed77cdaa851e45d71f10c69db41221 | benchexec/tools/yogar-cbmc-parallel.py | benchexec/tools/yogar-cbmc-parallel.py | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| Add forgotten program file for deployment | Add forgotten program file for deployment
| Python | apache-2.0 | ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
Add forgotten program file for deployment | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| <commit_before>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
<commit_msg>Add forgotten program file for deployment<commit_after> | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
    """BenchExec tool-info module for the parallel variant of Yogar-CBMC.

    Inherits result handling from the yogar-cbmc module (imported
    dynamically above because its module name contains a hyphen).
    """

    # Paths that must be copied along when the tool is deployed.
    REQUIRED_PATHS = [
        "yogar-cbmc"
    ]

    def executable(self):
        # Locate the parallel wrapper binary on the PATH.
        return util.find_executable('yogar-cbmc-parallel')

    def name(self):
        # Human-readable tool name shown in benchmark tables.
        return 'Yogar-CBMC-Parallel'

    def cmdline(self, executable, options, tasks, propertyfile, rlimits):
        # The property file and resource limits are not passed to the tool.
        return [executable] + options + tasks
| """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
Add forgotten program file for deployment"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| <commit_before>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
<commit_msg>Add forgotten program file for deployment<commit_after>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
94c943b3dcabb1611a745caef0ecf6da10e807e4 | kiteconnect/__version__.py | kiteconnect/__version__.py | __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
| __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0.b1"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
| Mark release as beta in setup.py | Mark release as beta in setup.py
| Python | mit | rainmattertech/pykiteconnect | __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
Mark release as beta in setup.py | __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0.b1"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
| <commit_before>__title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
<commit_msg>Mark release as beta in setup.py<commit_after> | __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0.b1"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
| __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
Mark release as beta in setup.py__title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0.b1"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
| <commit_before>__title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
<commit_msg>Mark release as beta in setup.py<commit_after>__title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.7.0.b1"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "talk@zerodha.tech"
__license__ = "MIT"
|
6b53e14ec1f5c71d2aa1f17a4108f4e1d88e8b89 | pywikibot/families/wikia_family.py | pywikibot/families/wikia_family.py | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| Update a version number from trunk r9016 | Update a version number from trunk r9016
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9040 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6
| Python | mit | legoktm/pywikipedia-rewrite | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9040 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6 | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| <commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9040 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6<commit_after> | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
    """Family file for www.wikia.com (single-site wiki, one pseudo-language)."""

    def __init__(self):
        family.Family.__init__(self)
        self.name = u'wikia'
        # Single pseudo-language code; the actual hostname is fixed below.
        self.langs = {
            u'wikia': None,
        }

    def hostname(self, code):
        return u'www.wikia.com'

    def version(self, code):
        # MediaWiki version run by the site.
        return "1.16.2"

    def scriptpath(self, code):
        # Scripts live at the domain root.
        return ''

    def apipath(self, code):
        return '/api.php'
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9040 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| <commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9040 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
98f1df191febd889fcde861f94a9ca126c60ea37 | tests/GIR/runalltests.py | tests/GIR/runalltests.py | # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
if not result.wasSuccessful():
sys.exit(1)
| # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if not result.wasSuccessful():
sys.exit(1)
| Remove SQLite database before running tests | Remove SQLite database before running tests
| Python | lgpl-2.1 | midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core | # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
if not result.wasSuccessful():
sys.exit(1)
Remove SQLite database before running tests | # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if not result.wasSuccessful():
sys.exit(1)
| <commit_before># -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
if not result.wasSuccessful():
sys.exit(1)
<commit_msg>Remove SQLite database before running tests<commit_after> | # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if not result.wasSuccessful():
sys.exit(1)
| # -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
if not result.wasSuccessful():
sys.exit(1)
Remove SQLite database before running tests# -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if not result.wasSuccessful():
sys.exit(1)
| <commit_before># -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
if not result.wasSuccessful():
sys.exit(1)
<commit_msg>Remove SQLite database before running tests<commit_after># -*- Mode: Python -*-
import os
import glob
import sys
import shutil
import unittest
if os.path.isfile("./test_data/test_gir.db"):
os.remove("./test_data/test_gir.db")
testLoader = unittest.TestLoader()
names = []
for filename in glob.iglob("test_*.py"):
names.append(filename[:-3])
names.sort()
testSuite = testLoader.loadTestsFromNames(names)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(testSuite)
# Cleanup
try:
shutil.rmtree("./test_data/blobs")
except OSError:
pass
if not result.wasSuccessful():
sys.exit(1)
|
74dcf36c2eecab290c1c76c947b024e51d280ea7 | tests/test_rover_init.py | tests/test_rover_init.py | def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
| def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
| Add testing for default grid_* values | Add testing for default grid_* values
| Python | mit | authentik8/rover | def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
Add testing for default grid_* values | def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
| <commit_before>def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
<commit_msg>Add testing for default grid_* values<commit_after> | def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
| def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
Add testing for default grid_* valuesdef test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
| <commit_before>def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
<commit_msg>Add testing for default grid_* values<commit_after>def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
|
a9a01b2bb07fadb6e9ab07228fc6857f28e4d444 | src/edge/context_processors.py | src/edge/context_processors.py | import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
| import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
| Fix flake8 error by adding extra line | Fix flake8 error by adding extra line
| Python | mit | ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge | import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
Fix flake8 error by adding extra line | import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
| <commit_before>import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
<commit_msg>Fix flake8 error by adding extra line<commit_after> | import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
| import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
Fix flake8 error by adding extra lineimport os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
| <commit_before>import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
<commit_msg>Fix flake8 error by adding extra line<commit_after>import os
def export_envs(request):
data = {}
data['FULLSTORY_ORG_ID'] = os.getenv('FULLSTORY_ORG_ID', '')
return data
|
fa7ffa32f0484c45a665de2766a97ae7a23a0b6d | pyicloud/utils.py | pyicloud/utils.py | import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'ICloud Password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
| import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'Enter iCloud password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
| Tweak wording of password prompt | Tweak wording of password prompt
iCloud is branded with a lower case 'i' like most other Apple products.
| Python | mit | picklepete/pyicloud,picklepete/pyicloud | import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'ICloud Password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
Tweak wording of password prompt
iCloud is branded with a lower case 'i' like most other Apple products. | import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'Enter iCloud password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
| <commit_before>import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'ICloud Password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
<commit_msg>Tweak wording of password prompt
iCloud is branded with a lower case 'i' like most other Apple products.<commit_after> | import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'Enter iCloud password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
| import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'ICloud Password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
Tweak wording of password prompt
iCloud is branded with a lower case 'i' like most other Apple products.import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'Enter iCloud password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
| <commit_before>import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'ICloud Password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
<commit_msg>Tweak wording of password prompt
iCloud is branded with a lower case 'i' like most other Apple products.<commit_after>import getpass
import keyring
from .exceptions import NoStoredPasswordAvailable
KEYRING_SYSTEM = 'pyicloud://icloud-password'
def get_password(username, interactive=True):
try:
return get_password_from_keyring(username)
except NoStoredPasswordAvailable:
if not interactive:
raise
return getpass.getpass(
'Enter iCloud password for {username}: '.format(
username=username,
)
)
def password_exists_in_keyring(username):
try:
get_password_from_keyring(username)
except NoStoredPasswordAvailable:
return False
return True
def get_password_from_keyring(username):
result = keyring.get_password(
KEYRING_SYSTEM,
username
)
if result is None:
raise NoStoredPasswordAvailable(
"No pyicloud password for {username} could be found "
"in the system keychain. Use the `--store-in-keyring` "
"command-line option for storing a password for this "
"username.".format(
username=username,
)
)
return result
def store_password_in_keyring(username, password):
return keyring.set_password(
KEYRING_SYSTEM,
username,
password,
)
def delete_password_in_keyring(username):
return keyring.delete_password(
KEYRING_SYSTEM,
username,
)
|
cd189a5cbf8cbb567efaba0e92b3c31278817a39 | pnrg/filters.py | pnrg/filters.py | from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\LaTeX')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
| from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\textrm{\\LaTeX}')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
| Use Computer Modern for \LaTeX macro | Use Computer Modern for \LaTeX macro
Source Sans pro (and most othe sans-serif fonts) render the LaTeX macro
pretty weirdly, so the classic Computer Modern should be okay here. It
may wind up being an odd contrast in an otherwise sans-serif document
though, so this may eventually get reverted!
| Python | mit | sjbarag/poorly-named-resume-generator,sjbarag/poorly-named-resume-generator | from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\LaTeX')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
Use Computer Modern for \LaTeX macro
Source Sans pro (and most othe sans-serif fonts) render the LaTeX macro
pretty weirdly, so the classic Computer Modern should be okay here. It
may wind up being an odd contrast in an otherwise sans-serif document
though, so this may eventually get reverted! | from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\textrm{\\LaTeX}')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
| <commit_before>from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\LaTeX')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
<commit_msg>Use Computer Modern for \LaTeX macro
Source Sans pro (and most othe sans-serif fonts) render the LaTeX macro
pretty weirdly, so the classic Computer Modern should be okay here. It
may wind up being an odd contrast in an otherwise sans-serif document
though, so this may eventually get reverted!<commit_after> | from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\textrm{\\LaTeX}')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
| from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\LaTeX')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
Use Computer Modern for \LaTeX macro
Source Sans pro (and most othe sans-serif fonts) render the LaTeX macro
pretty weirdly, so the classic Computer Modern should be okay here. It
may wind up being an odd contrast in an otherwise sans-serif document
though, so this may eventually get reverted!from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\textrm{\\LaTeX}')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
| <commit_before>from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\LaTeX')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
<commit_msg>Use Computer Modern for \LaTeX macro
Source Sans pro (and most othe sans-serif fonts) render the LaTeX macro
pretty weirdly, so the classic Computer Modern should be okay here. It
may wind up being an odd contrast in an otherwise sans-serif document
though, so this may eventually get reverted!<commit_after>from jinja2._compat import text_type
import re
def do_right(value, width=80):
"""Right-justifies the value in a field of a given width."""
return text_type(value).rjust(width)
_LATEX_SUBS = (
(re.compile(r'\\'), r'\\textbackslash'),
(re.compile(r'([{}_#%&$])'), r'\\\1'),
(re.compile(r'~'), r'\~{}'),
(re.compile(r'\^'), r'\^{}'),
(re.compile(r'"'), r"''"),
(re.compile(r'\.\.\.+'), r'\\ldots'),
(re.compile(r'&'), r'&'),
(re.compile(r'LaTeX'), r'\\textrm{\\LaTeX}')
)
def escape_tex(value):
"""
Escapes TeX characters to avoid breaking {La,Lua,Xe}Tex compilers.
Kang'd (with permission!) from http://flask.pocoo.org/snippets/55/
"""
newval = value
for pattern, replacement in _LATEX_SUBS:
newval = pattern.sub(replacement, newval)
return newval
|
d1a893514092a2495fcd2b14365b9b014fd39f59 | calaccess_processed/models/__init__.py | calaccess_processed/models/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'MonetaryContribution',
'MonetaryContributionVersion',
'NonMonetaryContribution',
'NonMonetaryContributionVersion',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
| Add missing models to __all__ list | Add missing models to __all__ list
| Python | mit | california-civic-data-coalition/django-calaccess-processed-data,california-civic-data-coalition/django-calaccess-processed-data | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
Add missing models to __all__ list | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'MonetaryContribution',
'MonetaryContributionVersion',
'NonMonetaryContribution',
'NonMonetaryContributionVersion',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
<commit_msg>Add missing models to __all__ list<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'MonetaryContribution',
'MonetaryContributionVersion',
'NonMonetaryContribution',
'NonMonetaryContributionVersion',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
Add missing models to __all__ list#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'MonetaryContribution',
'MonetaryContributionVersion',
'NonMonetaryContribution',
'NonMonetaryContributionVersion',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
<commit_msg>Add missing models to __all__ list<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import all of the models from submodules and thread them together.
"""
from calaccess_processed.models.campaign.entities import (
Candidate,
CandidateCommittee,
)
from calaccess_processed.models.campaign.filings import (
Form460,
Form460Version,
Schedule497,
Schedule497Version,
)
from calaccess_processed.models.campaign.contributions import (
MonetaryContribution,
MonetaryContributionVersion,
NonMonetaryContribution,
NonMonetaryContributionVersion,
LateContributionReceived,
LateContributionReceivedVersion,
LateContributionMade,
LateContributionMadeVersion,
)
from calaccess_processed.models.common import (
FilerIDValue,
FilingIDValue,
)
from calaccess_processed.models.tracking import (
ProcessedDataVersion,
ProcessedDataFile,
)
__all__ = (
'ProcessedDataVersion',
'ProcessedDataFile',
'Candidate',
'CandidateCommittee',
'Form460',
'Form460Version',
'Schedule497',
'Schedule497Version',
'MonetaryContribution',
'MonetaryContributionVersion',
'NonMonetaryContribution',
'NonMonetaryContributionVersion',
'LateContributionReceived',
'LateContributionReceivedVersion',
'LateContributionMade',
'LateContributionMadeVersion',
'FilerIDValue',
'FilingIDValue',
)
|
64dd3ecb1cf5adbf68d59d231b67e7d30ace715f | scripts/create-hosted-graphite-dashboards.py | scripts/create-hosted-graphite-dashboards.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
To add
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This pushes dashboards defined in /grafana directory up to our hosted graphite.
You can get the <hosted_graphite_api_key> from the "Account Home" page of hosted graphite.
To change a dashboard a process you should follow is:
1. Set "editable": true in one our existing JSON files
2. Change the title (to avoid overwriting the existing dashboard as you try things out)
3. Run this script
4. Find the newly created dashboard in hosted graphite and make changes
5. Export the JSON for the new dashboard (click settings cog, "View JSON") and overwrite the existing JSON file
6. Set "editable": false and set the title back to the original one. Remove the new "id" key from the exported JSON
7. Run this script again
8. Check you're happy with the new dashboard
9. Delete the editable "test" dashboard you created in step 3 (click settings cog, "Delete dashboard")
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
| Update docstring for creating hosted graphite dashboards | Update docstring for creating hosted graphite dashboards
| Python | mit | alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
To add
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
Update docstring for creating hosted graphite dashboards | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This pushes dashboards defined in /grafana directory up to our hosted graphite.
You can get the <hosted_graphite_api_key> from the "Account Home" page of hosted graphite.
To change a dashboard a process you should follow is:
1. Set "editable": true in one our existing JSON files
2. Change the title (to avoid overwriting the existing dashboard as you try things out)
3. Run this script
4. Find the newly created dashboard in hosted graphite and make changes
5. Export the JSON for the new dashboard (click settings cog, "View JSON") and overwrite the existing JSON file
6. Set "editable": false and set the title back to the original one. Remove the new "id" key from the exported JSON
7. Run this script again
8. Check you're happy with the new dashboard
9. Delete the editable "test" dashboard you created in step 3 (click settings cog, "Delete dashboard")
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
To add
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
<commit_msg>Update docstring for creating hosted graphite dashboards<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This pushes dashboards defined in /grafana directory up to our hosted graphite.
You can get the <hosted_graphite_api_key> from the "Account Home" page of hosted graphite.
To change a dashboard a process you should follow is:
1. Set "editable": true in one our existing JSON files
2. Change the title (to avoid overwriting the existing dashboard as you try things out)
3. Run this script
4. Find the newly created dashboard in hosted graphite and make changes
5. Export the JSON for the new dashboard (click settings cog, "View JSON") and overwrite the existing JSON file
6. Set "editable": false and set the title back to the original one. Remove the new "id" key from the exported JSON
7. Run this script again
8. Check you're happy with the new dashboard
9. Delete the editable "test" dashboard you created in step 3 (click settings cog, "Delete dashboard")
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
To add
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
Update docstring for creating hosted graphite dashboards#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This pushes dashboards defined in /grafana directory up to our hosted graphite.
You can get the <hosted_graphite_api_key> from the "Account Home" page of hosted graphite.
To change a dashboard a process you should follow is:
1. Set "editable": true in one our existing JSON files
2. Change the title (to avoid overwriting the existing dashboard as you try things out)
3. Run this script
4. Find the newly created dashboard in hosted graphite and make changes
5. Export the JSON for the new dashboard (click settings cog, "View JSON") and overwrite the existing JSON file
6. Set "editable": false and set the title back to the original one. Remove the new "id" key from the exported JSON
7. Run this script again
8. Check you're happy with the new dashboard
9. Delete the editable "test" dashboard you created in step 3 (click settings cog, "Delete dashboard")
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
To add
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
<commit_msg>Update docstring for creating hosted graphite dashboards<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This pushes dashboards defined in /grafana directory up to our hosted graphite.
You can get the <hosted_graphite_api_key> from the "Account Home" page of hosted graphite.
To change a dashboard a process you should follow is:
1. Set "editable": true in one our existing JSON files
2. Change the title (to avoid overwriting the existing dashboard as you try things out)
3. Run this script
4. Find the newly created dashboard in hosted graphite and make changes
5. Export the JSON for the new dashboard (click settings cog, "View JSON") and overwrite the existing JSON file
6. Set "editable": false and set the title back to the original one. Remove the new "id" key from the exported JSON
7. Run this script again
8. Check you're happy with the new dashboard
9. Delete the editable "test" dashboard you created in step 3 (click settings cog, "Delete dashboard")
Usage:
scripts/create-hosted-graphite-dashboards.py <hosted_graphite_api_key>
Example:
scripts/create-hosted-graphite-dashboards.py apikey
"""
import os
import sys
import requests
from docopt import docopt
sys.path.insert(0, '.') # noqa
def generate_dashboards(api_key):
endpoint = "https://api.hostedgraphite.com/api/v2/grafana/dashboards/"
path = os.path.join(os.path.dirname(__file__), "../grafana/")
for filename in os.listdir(path):
with open(path + filename) as fp:
resp = requests.put(endpoint, auth=(api_key, ''), data=fp.read())
resp.raise_for_status()
if __name__ == "__main__":
arguments = docopt(__doc__)
generate_dashboards(arguments['<hosted_graphite_api_key>'])
|
08aa975b100b1dbf8c9594a2e57368ad866f98a4 | api/caching/tasks.py | api/caching/tasks.py | import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from api.base import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
| import urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
| Move from celery task to regular-old function | Move from celery task to regular-old function
| Python | apache-2.0 | sloria/osf.io,cwisecarver/osf.io,mattclark/osf.io,emetsger/osf.io,SSJohns/osf.io,erinspace/osf.io,asanfilippo7/osf.io,mluo613/osf.io,mluo613/osf.io,caseyrollins/osf.io,saradbowman/osf.io,cwisecarver/osf.io,icereval/osf.io,doublebits/osf.io,brianjgeiger/osf.io,mluo613/osf.io,adlius/osf.io,adlius/osf.io,aaxelb/osf.io,binoculars/osf.io,sloria/osf.io,DanielSBrown/osf.io,hmoco/osf.io,samchrisinger/osf.io,mattclark/osf.io,mluke93/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,mluo613/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,caneruguz/osf.io,zamattiac/osf.io,binoculars/osf.io,chrisseto/osf.io,zamattiac/osf.io,doublebits/osf.io,zachjanicki/osf.io,aaxelb/osf.io,doublebits/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,mluke93/osf.io,chennan47/osf.io,rdhyee/osf.io,RomanZWang/osf.io,leb2dg/osf.io,mfraezz/osf.io,crcresearch/osf.io,acshi/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,icereval/osf.io,Johnetordoff/osf.io,acshi/osf.io,adlius/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,aaxelb/osf.io,SSJohns/osf.io,mluke93/osf.io,chrisseto/osf.io,amyshi188/osf.io,amyshi188/osf.io,chrisseto/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,baylee-d/osf.io,billyhunt/osf.io,alexschiller/osf.io,mluo613/osf.io,jnayak1/osf.io,billyhunt/osf.io,abought/osf.io,kwierman/osf.io,mattclark/osf.io,abought/osf.io,pattisdr/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,cslzchen/osf.io,doublebits/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,abought/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,jnayak1/osf.io,wearpants/osf.io,kch8qx/osf.io,rdhyee/osf.io,emetsger/osf.io,binoculars/osf.io,TomBaxter/osf.io,kwierman/osf.io,alexschiller/osf.io,erinspace/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,kwierman/osf.io,sloria/osf.io,icereval/osf.io,T
omHeatwole/osf.io,kwierman/osf.io,emetsger/osf.io,saradbowman/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,zamattiac/osf.io,amyshi188/osf.io,wearpants/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,acshi/osf.io,acshi/osf.io,rdhyee/osf.io,felliott/osf.io,alexschiller/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,acshi/osf.io,laurenrevere/osf.io,mfraezz/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,emetsger/osf.io,jnayak1/osf.io,hmoco/osf.io,chennan47/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,caseyrollins/osf.io,caneruguz/osf.io,adlius/osf.io,doublebits/osf.io,caneruguz/osf.io,alexschiller/osf.io,laurenrevere/osf.io,cslzchen/osf.io,felliott/osf.io,wearpants/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,amyshi188/osf.io,hmoco/osf.io,baylee-d/osf.io,samchrisinger/osf.io,SSJohns/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,mfraezz/osf.io,samchrisinger/osf.io,abought/osf.io,kch8qx/osf.io,zamattiac/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,mluke93/osf.io,chrisseto/osf.io,billyhunt/osf.io,SSJohns/osf.io,pattisdr/osf.io,jnayak1/osf.io,felliott/osf.io,hmoco/osf.io,rdhyee/osf.io,crcresearch/osf.io,Nesiehr/osf.io,leb2dg/osf.io | import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from api.base import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
Move from celery task to regular-old function | import urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
| <commit_before>import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from api.base import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
<commit_msg>Move from celery task to regular-old function<commit_after> | import urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
| import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from api.base import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
Move from celery task to regular-old functionimport urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
| <commit_before>import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from api.base import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
<commit_msg>Move from celery task to regular-old function<commit_after>import urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
|
13d9edb6d643d0955663f78b610fee6106e7128e | KISSmetrics/client.py | KISSmetrics/client.py | # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ['http', 'https']:
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
| # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ('http', 'https'):
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
| Use a tuple instead of a list for protocol membership test | Use a tuple instead of a list for protocol membership test
| Python | mit | kissmetrics/py-KISSmetrics | # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ['http', 'https']:
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
Use a tuple instead of a list for protocol membership test | # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ('http', 'https'):
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
| <commit_before># -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ['http', 'https']:
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
<commit_msg>Use a tuple instead of a list for protocol membership test<commit_after> | # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ('http', 'https'):
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
| # -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ['http', 'https']:
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
Use a tuple instead of a list for protocol membership test# -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ('http', 'https'):
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
| <commit_before># -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ['http', 'https']:
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
<commit_msg>Use a tuple instead of a list for protocol membership test<commit_after># -*- coding: utf-8 -*-
import KISSmetrics
from KISSmetrics import request
from urllib3 import PoolManager
class Client:
def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
trk_proto=KISSmetrics.TRACKING_PROTOCOL):
self.key = key
if trk_proto not in ('http', 'https'):
raise ValueError('trk_proto must be one of (http, https)')
self.http = PoolManager()
self.trk_host = trk_host
self.trk_proto = trk_proto
def request(self, query, method="GET"):
url = '%s://%s/%s' % (self.trk_proto, self.trk_host, query)
return self.http.request(method, url)
def record(self, person, event, properties=None, timestamp=None,
uri=KISSmetrics.RECORD_URI):
this_request = request.record(self.key, person, event,
timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def set(self, person, properties=None, timestamp=None,
uri=KISSmetrics.SET_URI):
this_request = request.set(self.key, person, timestamp=timestamp,
properties=properties, uri=uri)
return self.request(this_request)
def alias(self, person, identity, uri=KISSmetrics.ALIAS_URI):
this_request = request.alias(self.key, person, identity, uri=uri)
return self.request(this_request)
|
1bba76808aa5c598f1558cd127d8ed4a006692e1 | tests/conftest.py | tests/conftest.py | import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
| import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
| Make sure we import qtpy before importing any Qt wrappers directly | Make sure we import qtpy before importing any Qt wrappers directly | Python | mit | spyder-ide/qtpy,goanpeca/qtpy,davvid/qtpy,davvid/qtpy,goanpeca/qtpy | import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
Make sure we import qtpy before importing any Qt wrappers directly | import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
| <commit_before>import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
<commit_msg>Make sure we import qtpy before importing any Qt wrappers directly<commit_after> | import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
| import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
Make sure we import qtpy before importing any Qt wrappers directlyimport os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
| <commit_before>import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
<commit_msg>Make sure we import qtpy before importing any Qt wrappers directly<commit_after>import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
8359d60480371a8f63bdd4ea1b7cf03f231c1350 | djangopress/settings_tinymce.py | djangopress/settings_tinymce.py | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" | Update settings for tinymce to allow show_blog_latest tag | Update settings for tinymce to allow show_blog_latest tag
| Python | mit | codefisher/djangopress,codefisher/djangopress,codefisher/djangopress,codefisher/djangopress | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"Update settings for tinymce to allow show_blog_latest tag | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" | <commit_before># if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"<commit_msg>Update settings for tinymce to allow show_blog_latest tag<commit_after> | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" | # if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"Update settings for tinymce to allow show_blog_latest tag# if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" | <commit_before># if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"<commit_msg>Update settings for tinymce to allow show_blog_latest tag<commit_after># if you want support for tinymce in the admin pages
# add tinymce to the installed apps (after installing if needed)
# and then import these settings, or copy and adjust as needed
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js" |
3ca7c667cbf37499dc959b336b9ff0e88f5d4275 | dbarray/tests/run.py | dbarray/tests/run.py | """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
| """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
| Remove commented code . | Remove commented code [ci skip].
| Python | bsd-3-clause | ecometrica/django-dbarray | """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
Remove commented code [ci skip]. | """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
| <commit_before>"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
<commit_msg>Remove commented code [ci skip].<commit_after> | """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
| """From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
Remove commented code [ci skip]."""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
| <commit_before>"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
<commit_msg>Remove commented code [ci skip].<commit_after>"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
a1bf03f69b9cadddcc7e0015788f23f9bad0f862 | apps/splash/views.py | apps/splash/views.py | import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
splash_year.events = _merge_events(splash_year.splash_events.all())
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| Append event merging on splash_events | Append event merging on splash_events
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
Append event merging on splash_events | import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
splash_year.events = _merge_events(splash_year.splash_events.all())
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| <commit_before>import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
<commit_msg>Append event merging on splash_events<commit_after> | import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
splash_year.events = _merge_events(splash_year.splash_events.all())
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
Append event merging on splash_eventsimport datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
splash_year.events = _merge_events(splash_year.splash_events.all())
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| <commit_before>import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
<commit_msg>Append event merging on splash_events<commit_after>import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
splash_year.events = _merge_events(splash_year.splash_events.all())
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
|
e722dbc13dcf1c30086fb3aff9afd89d2bddb409 | validator/jschematest.py | validator/jschematest.py | # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
with open('../schema.alt.json') as f:
schema = json.load(f)
with open('../datamodel/core/chapter1.json') as f:
instance = json.load(f)
v = jsonschema.Draft4Validator(schema)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
| # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
if __name__ == "__main__":
import os
with open('../schema-chapter.json') as f:
schema = json.load(f)
v = jsonschema.Draft4Validator(schema)
for root, dirs, files in os.walk('../datamodel'):
for name in files:
if name.endswith('.json'):
print (os.path.join(root, name))
with open(os.path.join(root, name)) as f:
instance = json.load(f)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
| Make chapter validator loop over all chapter files in the knowledge model | Make chapter validator loop over all chapter files in the knowledge model
| Python | apache-2.0 | DataStewardshipPortal/ds-km,CCMi-FIT/ds-km,CCMi-FIT/ds-km-core | # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
with open('../schema.alt.json') as f:
schema = json.load(f)
with open('../datamodel/core/chapter1.json') as f:
instance = json.load(f)
v = jsonschema.Draft4Validator(schema)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
Make chapter validator loop over all chapter files in the knowledge model | # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
if __name__ == "__main__":
import os
with open('../schema-chapter.json') as f:
schema = json.load(f)
v = jsonschema.Draft4Validator(schema)
for root, dirs, files in os.walk('../datamodel'):
for name in files:
if name.endswith('.json'):
print (os.path.join(root, name))
with open(os.path.join(root, name)) as f:
instance = json.load(f)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
| <commit_before># Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
with open('../schema.alt.json') as f:
schema = json.load(f)
with open('../datamodel/core/chapter1.json') as f:
instance = json.load(f)
v = jsonschema.Draft4Validator(schema)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
<commit_msg>Make chapter validator loop over all chapter files in the knowledge model<commit_after> | # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
if __name__ == "__main__":
import os
with open('../schema-chapter.json') as f:
schema = json.load(f)
v = jsonschema.Draft4Validator(schema)
for root, dirs, files in os.walk('../datamodel'):
for name in files:
if name.endswith('.json'):
print (os.path.join(root, name))
with open(os.path.join(root, name)) as f:
instance = json.load(f)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
| # Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
with open('../schema.alt.json') as f:
schema = json.load(f)
with open('../datamodel/core/chapter1.json') as f:
instance = json.load(f)
v = jsonschema.Draft4Validator(schema)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
Make chapter validator loop over all chapter files in the knowledge model# Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
if __name__ == "__main__":
import os
with open('../schema-chapter.json') as f:
schema = json.load(f)
v = jsonschema.Draft4Validator(schema)
for root, dirs, files in os.walk('../datamodel'):
for name in files:
if name.endswith('.json'):
print (os.path.join(root, name))
with open(os.path.join(root, name)) as f:
instance = json.load(f)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
| <commit_before># Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
with open('../schema.alt.json') as f:
schema = json.load(f)
with open('../datamodel/core/chapter1.json') as f:
instance = json.load(f)
v = jsonschema.Draft4Validator(schema)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
<commit_msg>Make chapter validator loop over all chapter files in the knowledge model<commit_after># Testing jsonschema error printing capabilities
import jsonschema
import json
# Custom printing of errors (skip no-information messages)
# TODO: solve repetition of same error (with different paths)
def print_errors(errors, indent=0):
next_indent = indent + 2
for error in errors:
msg = error.message
print(' '*indent, end='')
if error.validator in ['anyOf', 'oneOf', 'allOf']:
if 'questionid' in error.instance:
qid = error.instance['questionid']
print('Question with ID {} is not valid:'.format(qid))
elif 'chapterid' in error.instance:
chid = error.instance['chapterid']
print('Chapter with ID {} is not valid:'.format(chid))
else:
print(msg)
else:
print(msg)
new_errors = sorted(error.context, key=lambda e: e.schema_path)
print_errors(new_errors, next_indent)
if __name__ == "__main__":
import os
with open('../schema-chapter.json') as f:
schema = json.load(f)
v = jsonschema.Draft4Validator(schema)
for root, dirs, files in os.walk('../datamodel'):
for name in files:
if name.endswith('.json'):
print (os.path.join(root, name))
with open(os.path.join(root, name)) as f:
instance = json.load(f)
errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
print_errors(errors)
|
1f4fea5d4bb67f84defa1693e0ea26295de489ff | helios/conf/settings.py | helios/conf/settings.py | # -*- coding: utf-8 -*-
from django.conf import settings
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
| # -*- coding: utf-8 -*-
from django.conf import settings
# Store configuration: each value is read from the Django settings module
# under a STORE_-prefixed name, falling back to the default given here.
DEBUG = getattr(settings, 'STORE_DEBUG', False)
# Page size for store listings.
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
# Dotted paths, overridable to swap in custom implementations.
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
| Set the DEBUG store setting | Set the DEBUG store setting
| Python | bsd-3-clause | panosl/helios | # -*- coding: utf-8 -*-
from django.conf import settings
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
Set the DEBUG store setting | # -*- coding: utf-8 -*-
from django.conf import settings
DEBUG = getattr(settings, 'STORE_DEBUG', False)
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
| <commit_before># -*- coding: utf-8 -*-
from django.conf import settings
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
<commit_msg>Set the DEBUG store setting<commit_after> | # -*- coding: utf-8 -*-
from django.conf import settings
DEBUG = getattr(settings, 'STORE_DEBUG', False)
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
| # -*- coding: utf-8 -*-
from django.conf import settings
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
Set the DEBUG store setting# -*- coding: utf-8 -*-
from django.conf import settings
DEBUG = getattr(settings, 'STORE_DEBUG', False)
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
| <commit_before># -*- coding: utf-8 -*-
from django.conf import settings
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
<commit_msg>Set the DEBUG store setting<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
DEBUG = getattr(settings, 'STORE_DEBUG', False)
PAGINATE_BY = getattr(settings, 'STORE_PAGINATE_BY', 50)
IS_MULTILINGUAL = getattr(settings, 'STORE_IS_MULTILINGUAL', False)
HAS_CURRENCIES = getattr(settings, 'STORE_HAS_CURRENCIES', False)
USE_PAYPAL = getattr(settings, 'STORE_USE_PAYPAL', False)
PRODUCT_MODEL = getattr(settings, 'STORE_PRODUCT_MODEL', 'helios.store.models.Product')
CART = getattr(settings, 'STORE_CART', 'helios.store.cart')
|
61b59bfdf9581a280263f4049e7d257fa47cdad0 | teknologr/registration/views.py | teknologr/registration/views.py | from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
return render(request, self.template, self.context)
| from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
    # Base template context exposing the DEBUG flag.
    # NOTE(review): this is a *class* attribute — a single dict shared by
    # every instance of every subclass.  Any view that assigns into
    # self.context mutates this shared dict, so state can leak across
    # requests; confirm whether per-request copies are intended.
    context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
    """Render the registration landing page with a fresh form."""
    template = 'registration.html'

    def get(self, request):
        # Copy the shared class-level dict instead of writing into it:
        # BaseView.context is a single dict shared by every view instance,
        # so mutating self.context leaks state between requests.
        context = dict(self.context)
        context['programmes'] = DEGREE_PROGRAMME_CHOICES
        context['form'] = RegistrationForm()
        return render(request, self.template, context)
class SubmitView(BaseView):
    """Handle registration submission and show a one-time confirmation page."""
    template = 'submit.html'

    def get(self, request, **kwargs):
        """Show the confirmation exactly once after a successful POST.

        The context is popped from the session, so a reload or direct
        visit (no pending submission) redirects back to the home view.
        """
        previous_context = request.session.pop('context', None)
        if not previous_context:
            return redirect('registration.views.home')
        return render(request, self.template, previous_context)

    def post(self, request):
        """Validate and save a registration, then redirect (POST/redirect/GET)."""
        form = RegistrationForm(request.POST)
        # Work on a per-request copy: BaseView.context is a class-level
        # dict shared by every instance, so mutating self.context leaks
        # state between concurrent/successive requests.
        context = dict(self.context)
        if not form.is_valid():
            context['form'] = form
            return render(request, HomeView.template, context, status=400)
        registration = form.instance
        context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
        context['email'] = registration.email
        # FIXME: handle situation where email is not sent (e.g. admin log tool)
        mailApplicantSubmission(context)
        registration.save()
        request.session['context'] = context
        return redirect('registration.views.submit')
| Fix registration form replay issue | Fix registration form replay issue
| Python | mit | Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io | from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
return render(request, self.template, self.context)
Fix registration form replay issue | from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def get(self, request, **kwargs):
previous_context = request.session.pop('context', None)
if not previous_context:
return redirect('registration.views.home')
return render(request, self.template, previous_context)
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
request.session['context'] = self.context
return redirect('registration.views.submit')
| <commit_before>from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
return render(request, self.template, self.context)
<commit_msg>Fix registration form replay issue<commit_after> | from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def get(self, request, **kwargs):
previous_context = request.session.pop('context', None)
if not previous_context:
return redirect('registration.views.home')
return render(request, self.template, previous_context)
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
request.session['context'] = self.context
return redirect('registration.views.submit')
| from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
return render(request, self.template, self.context)
Fix registration form replay issuefrom django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def get(self, request, **kwargs):
previous_context = request.session.pop('context', None)
if not previous_context:
return redirect('registration.views.home')
return render(request, self.template, previous_context)
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
request.session['context'] = self.context
return redirect('registration.views.submit')
| <commit_before>from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
return render(request, self.template, self.context)
<commit_msg>Fix registration form replay issue<commit_after>from django.shortcuts import render, redirect
from django.conf import settings
from django.views import View
from members.programmes import DEGREE_PROGRAMME_CHOICES
from registration.forms import RegistrationForm
from registration.mailutils import mailApplicantSubmission
class BaseView(View):
context = {'DEBUG': settings.DEBUG}
class HomeView(BaseView):
template = 'registration.html'
def get(self, request):
self.context['programmes'] = DEGREE_PROGRAMME_CHOICES
self.context['form'] = RegistrationForm()
return render(request, self.template, self.context)
class SubmitView(BaseView):
template = 'submit.html'
def get(self, request, **kwargs):
previous_context = request.session.pop('context', None)
if not previous_context:
return redirect('registration.views.home')
return render(request, self.template, previous_context)
def post(self, request):
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.instance
self.context['name'] = registration.preferred_name or registration.given_names.split(' ')[0]
self.context['email'] = registration.email
# FIXME: handle situation where email is not sent (e.g. admin log tool)
mailApplicantSubmission(self.context)
registration.save()
else:
self.context['form'] = form
return render(request, HomeView.template, self.context, status=400)
request.session['context'] = self.context
return redirect('registration.views.submit')
|
3fad3c64a317956265c14c82c182b66979ba8554 | greatbigcrane/preferences/forms.py | greatbigcrane/preferences/forms.py | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
| """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
    """Dynamic preferences form: one CharField per stored Preference row."""

    def __init__(self, *args, **kwargs):
        super(PreferencesForm, self).__init__(*args, **kwargs)
        # Build the field set at instantiation time so newly added
        # Preference rows show up without a code change.
        for preference in Preference.objects.all():
            self.fields[preference.name] = forms.CharField()

    def clean(self):
        # Apparently clean__projects_directory doesn't work with dynamic forms
        # Normalise the projects directory to end with the OS path separator.
        # Use .get(): the key is absent from cleaned_data when the field
        # failed validation, and plain indexing would raise KeyError
        # instead of letting the form report its errors.
        projects_directory = self.cleaned_data.get('projects_directory')
        if projects_directory and not projects_directory.endswith(os.path.sep):
            self.cleaned_data['projects_directory'] = projects_directory + os.path.sep
        return self.cleaned_data
| Append a trailing path separator to the project directory | Append a trailing path separator to the project directory
| Python | apache-2.0 | pnomolos/greatbigcrane,pnomolos/greatbigcrane | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
Append a trailing newline to the project directory | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
def clean(self):
# Apparently clean__projects_directory doesn't work with dynamic forms
if not self.cleaned_data['projects_directory'].endswith(os.path.sep):
self.cleaned_data['projects_directory'] += os.path.sep
return self.cleaned_data
| <commit_before>"""
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
<commit_msg>Append a trailing newline to the project directory<commit_after> | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
def clean(self):
# Apparently clean__projects_directory doesn't work with dynamic forms
if not self.cleaned_data['projects_directory'].endswith(os.path.sep):
self.cleaned_data['projects_directory'] += os.path.sep
return self.cleaned_data
| """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
Append a trailing newline to the project directory"""
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
def clean(self):
# Apparently clean__projects_directory doesn't work with dynamic forms
if not self.cleaned_data['projects_directory'].endswith(os.path.sep):
self.cleaned_data['projects_directory'] += os.path.sep
return self.cleaned_data
| <commit_before>"""
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
<commit_msg>Append a trailing newline to the project directory<commit_after>"""
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from django import forms
from preferences.models import Preference
class PreferencesForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreferencesForm, self).__init__(*args, **kwargs)
for preference in Preference.objects.all():
self.fields[preference.name] = forms.CharField()
def clean(self):
# Apparently clean__projects_directory doesn't work with dynamic forms
if not self.cleaned_data['projects_directory'].endswith(os.path.sep):
self.cleaned_data['projects_directory'] += os.path.sep
return self.cleaned_data
|
62454216b7d0426c23d75ba4aafd761093447e63 | compiler.py | compiler.py | #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
| #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
parser.add_argument("--js_externs", default='', help="Declare some external js vars and functions separated with semicolon")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
('js_externs', args.js_externs),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
| Add js_externs parameter for API | Add js_externs parameter for API
| Python | mit | femtopixel/docker-google-closure-compiler-api,femtopixel/docker-google-closure-compiler-api | #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
Add js_externs parameter for API | #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
parser.add_argument("--js_externs", default='', help="Declare some external js vars and functions separated with semicolon")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
('js_externs', args.js_externs),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
| <commit_before>#!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
<commit_msg>Add js_externs parameter for API<commit_after> | #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
parser.add_argument("--js_externs", default='', help="Declare some external js vars and functions separated with semicolon")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
('js_externs', args.js_externs),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
| #!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
Add js_externs parameter for API#!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
parser.add_argument("--js_externs", default='', help="Declare some external js vars and functions separated with semicolon")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
('js_externs', args.js_externs),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
| <commit_before>#!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
<commit_msg>Add js_externs parameter for API<commit_after>#!/usr/local/bin/python3
import http.client, urllib.parse, sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--js", default='/dev/stdin', help="Input file")
parser.add_argument("--js_output_file", default='/dev/stdout', help="Output file")
parser.add_argument("--compilation_level", default='WHITESPACE_ONLY', choices=['WHITESPACE_ONLY', 'SIMPLE_OPTIMIZATIONS', 'ADVANCED_OPTIMIZATIONS'], help="Compilation level")
parser.add_argument("--js_externs", default='', help="Declare some external js vars and functions separated with semicolon")
args = parser.parse_args()
js_code = open(args.js, 'r')
params = urllib.parse.urlencode([
('js_code', js_code.read()),
('compilation_level', args.compilation_level),
('output_format', 'text'),
('output_info', 'compiled_code'),
('js_externs', args.js_externs),
])
js_code.close()
headers = { "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8" }
conn = http.client.HTTPSConnection('closure-compiler.appspot.com')
conn.request('POST', '/compile', params, headers)
response = conn.getresponse()
data = response.read()
output_code = open(args.js_output_file, 'w')
output_code.write(data.decode("utf-8"))
output_code.close()
conn.close()
if __name__ == "__main__":
main()
|
d8344f9bea9cfbc8ab22b952f223d2365de907b4 | cli_helpers/tabular_output/terminaltables_adapter.py | cli_helpers/tabular_output/terminaltables_adapter.py | # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
try:
table = table_format_handler[table_format]
except KeyError:
raise ValueError('unrecognized table format: {}'.format(table_format))
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
| # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
table = table_format_handler[table_format]
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
| Remove try/except that should never be hit. | Remove try/except that should never be hit.
Increases code coverage.
| Python | bsd-3-clause | dbcli/cli_helpers,dbcli/cli_helpers | # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
try:
table = table_format_handler[table_format]
except KeyError:
raise ValueError('unrecognized table format: {}'.format(table_format))
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
Remove try/except that should never be hit.
Increases code coverage. | # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
table = table_format_handler[table_format]
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
| <commit_before># -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
try:
table = table_format_handler[table_format]
except KeyError:
raise ValueError('unrecognized table format: {}'.format(table_format))
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
<commit_msg>Remove try/except that should never be hit.
Increases code coverage.<commit_after> | # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
table = table_format_handler[table_format]
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
| # -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
try:
table = table_format_handler[table_format]
except KeyError:
raise ValueError('unrecognized table format: {}'.format(table_format))
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
Remove try/except that should never be hit.
Increases code coverage.# -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
table = table_format_handler[table_format]
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
| <commit_before># -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
try:
table = table_format_handler[table_format]
except KeyError:
raise ValueError('unrecognized table format: {}'.format(table_format))
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
<commit_msg>Remove try/except that should never be hit.
Increases code coverage.<commit_after># -*- coding: utf-8 -*-
"""Format adapter for the terminaltables module."""
import terminaltables
from cli_helpers.utils import filter_dict_by_key
from .preprocessors import (bytes_to_string, align_decimals,
override_missing_value)
supported_formats = ('ascii', 'double', 'github')
preprocessors = (bytes_to_string, override_missing_value, align_decimals)
def adapter(data, headers, table_format=None, **kwargs):
"""Wrap terminaltables inside a function for TabularOutputFormatter."""
keys = ('title', )
table_format_handler = {
'ascii': terminaltables.AsciiTable,
'double': terminaltables.DoubleTable,
'github': terminaltables.GithubFlavoredMarkdownTable,
}
table = table_format_handler[table_format]
t = table([headers] + data, **filter_dict_by_key(kwargs, keys))
return t.table
|
9a58d241e61301b9390b17e391e4b65a3ea85071 | squadron/libraries/apt/__init__.py | squadron/libraries/apt/__init__.py | import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) #Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
#Something else happened, we weren't installed and we didn't get installed
failed.append(package)
print out
return failed
| import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) # Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
# Something else happened, we weren't installed and we didn't get installed
failed.append(package)
return failed
| Remove extra print in apt | Remove extra print in apt
| Python | mit | gosquadron/squadron,gosquadron/squadron | import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) #Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
#Something else happened, we weren't installed and we didn't get installed
failed.append(package)
print out
return failed
Remove extra print in apt | import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) # Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
# Something else happened, we weren't installed and we didn't get installed
failed.append(package)
return failed
| <commit_before>import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) #Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
#Something else happened, we weren't installed and we didn't get installed
failed.append(package)
print out
return failed
<commit_msg>Remove extra print in apt<commit_after> | import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) # Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
# Something else happened, we weren't installed and we didn't get installed
failed.append(package)
return failed
| import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) #Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
#Something else happened, we weren't installed and we didn't get installed
failed.append(package)
print out
return failed
Remove extra print in aptimport os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) # Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
# Something else happened, we weren't installed and we didn't get installed
failed.append(package)
return failed
| <commit_before>import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) #Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
#Something else happened, we weren't installed and we didn't get installed
failed.append(package)
print out
return failed
<commit_msg>Remove extra print in apt<commit_after>import os
import subprocess
from string import find
def run_command(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out,err
def schema():
"""
This returns
"""
return { 'title': 'apt schema',
'type': 'string'
}
def verify(inputhashes):
"""
"""
failed = []
for package in inputhashes:
out = run_command(['dpkg-query', '-W', package])[0]
#We expect the output to contain the version
#Any error doesn't get captured, so out will be empty (yes this is weird)
if(find(out, package) == -1):
failed.append(package)
return failed
def apply(inputhashes, dry_run=True):
failed = []
for package in inputhashes:
out = run_command(['apt-get', 'install', '-y', package])
if(find(out[1], 'Permission denied') != -1):
failed.append(package) # Install failed because we're not root
if(find(out[0], ('Setting up ' + package)) != -1 and find(out[0], (package + ' already the newest version')) != -1):
# Something else happened, we weren't installed and we didn't get installed
failed.append(package)
return failed
|
aae84224b5a2d1689b1739da319d140474702c96 | zuice/bindings.py | zuice/bindings.py | class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
return Binder(key, self._bindings)
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._bound = False
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
if self._bound:
raise AlreadyBoundException()
self._bound = True
self._bindings[self._key] = provider
class AlreadyBoundException(Exception):
pass
| class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key, provider=None):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
if provider is None:
return Binder(key, self)
else:
self._bindings[key] = provider
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
self._bindings.bind(self._key, provider)
class AlreadyBoundException(Exception):
pass
| Simplify detection of keys already bound | Simplify detection of keys already bound
| Python | bsd-2-clause | mwilliamson/zuice | class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
return Binder(key, self._bindings)
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._bound = False
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
if self._bound:
raise AlreadyBoundException()
self._bound = True
self._bindings[self._key] = provider
class AlreadyBoundException(Exception):
pass
Simplify detection of keys already bound | class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key, provider=None):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
if provider is None:
return Binder(key, self)
else:
self._bindings[key] = provider
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
self._bindings.bind(self._key, provider)
class AlreadyBoundException(Exception):
pass
| <commit_before>class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
return Binder(key, self._bindings)
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._bound = False
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
if self._bound:
raise AlreadyBoundException()
self._bound = True
self._bindings[self._key] = provider
class AlreadyBoundException(Exception):
pass
<commit_msg>Simplify detection of keys already bound<commit_after> | class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key, provider=None):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
if provider is None:
return Binder(key, self)
else:
self._bindings[key] = provider
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
self._bindings.bind(self._key, provider)
class AlreadyBoundException(Exception):
pass
| class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
return Binder(key, self._bindings)
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._bound = False
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
if self._bound:
raise AlreadyBoundException()
self._bound = True
self._bindings[self._key] = provider
class AlreadyBoundException(Exception):
pass
Simplify detection of keys already boundclass Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key, provider=None):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
if provider is None:
return Binder(key, self)
else:
self._bindings[key] = provider
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
self._bindings.bind(self._key, provider)
class AlreadyBoundException(Exception):
pass
| <commit_before>class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
return Binder(key, self._bindings)
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._bound = False
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
if self._bound:
raise AlreadyBoundException()
self._bound = True
self._bindings[self._key] = provider
class AlreadyBoundException(Exception):
pass
<commit_msg>Simplify detection of keys already bound<commit_after>class Bindings(object):
def __init__(self):
self._bindings = {}
def bind(self, key, provider=None):
if key in self:
raise AlreadyBoundException("Cannot rebind key: %s" % key)
if provider is None:
return Binder(key, self)
else:
self._bindings[key] = provider
def copy(self):
copy = Bindings()
copy._bindings = self._bindings.copy()
return copy
def update(self, bindings):
for key in bindings._bindings:
if key in self._bindings:
raise AlreadyBoundException("Key already bound: %s" % key)
self._bindings.update(bindings._bindings)
def __contains__(self, key):
return key in self._bindings
def __getitem__(self, key):
return self._bindings[key]
class Binder(object):
def __init__(self, key, bindings):
self._key = key
self._bindings = bindings
def to_instance(self, instance):
self.to_provider(lambda: instance)
def to_key(self, key):
if key is self._key:
raise TypeError("Cannot bind a key to itself")
self.to_provider(lambda injector: injector.get(key))
def to_type(self, key):
return self.to_key(key)
def to_provider(self, provider):
self._bindings.bind(self._key, provider)
class AlreadyBoundException(Exception):
pass
|
66212e51341562f156353a0ae195d15b0d22b21b | scripts/import.py | scripts/import.py | #!/usr/bin/env python
import sys
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def post_file(file_path):
with open(file_path) as f:
response = requests.post(
endpoint,
data='{"services":%s}' % f.read(),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(post_file, list_files(listing_dir)):
print(result)
| #!/usr/bin/env python
import sys
import json
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def put_file(file_path):
with open(file_path) as f:
data = json.load(f)
data = {'services': data}
url = '{}/{}'.format(endpoint, data['services']['id'])
response = requests.put(
url,
data=json.dumps(data),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(put_file, list_files(listing_dir)):
print(result)
| PUT services rather than POSTing them | PUT services rather than POSTing them
| Python | mit | RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api | #!/usr/bin/env python
import sys
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def post_file(file_path):
with open(file_path) as f:
response = requests.post(
endpoint,
data='{"services":%s}' % f.read(),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(post_file, list_files(listing_dir)):
print(result)
PUT services rather than POSTing them | #!/usr/bin/env python
import sys
import json
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def put_file(file_path):
with open(file_path) as f:
data = json.load(f)
data = {'services': data}
url = '{}/{}'.format(endpoint, data['services']['id'])
response = requests.put(
url,
data=json.dumps(data),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(put_file, list_files(listing_dir)):
print(result)
| <commit_before>#!/usr/bin/env python
import sys
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def post_file(file_path):
with open(file_path) as f:
response = requests.post(
endpoint,
data='{"services":%s}' % f.read(),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(post_file, list_files(listing_dir)):
print(result)
<commit_msg>PUT services rather than POSTing them<commit_after> | #!/usr/bin/env python
import sys
import json
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def put_file(file_path):
with open(file_path) as f:
data = json.load(f)
data = {'services': data}
url = '{}/{}'.format(endpoint, data['services']['id'])
response = requests.put(
url,
data=json.dumps(data),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(put_file, list_files(listing_dir)):
print(result)
| #!/usr/bin/env python
import sys
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def post_file(file_path):
with open(file_path) as f:
response = requests.post(
endpoint,
data='{"services":%s}' % f.read(),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(post_file, list_files(listing_dir)):
print(result)
PUT services rather than POSTing them#!/usr/bin/env python
import sys
import json
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def put_file(file_path):
with open(file_path) as f:
data = json.load(f)
data = {'services': data}
url = '{}/{}'.format(endpoint, data['services']['id'])
response = requests.put(
url,
data=json.dumps(data),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(put_file, list_files(listing_dir)):
print(result)
| <commit_before>#!/usr/bin/env python
import sys
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def post_file(file_path):
with open(file_path) as f:
response = requests.post(
endpoint,
data='{"services":%s}' % f.read(),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(post_file, list_files(listing_dir)):
print(result)
<commit_msg>PUT services rather than POSTing them<commit_after>#!/usr/bin/env python
import sys
import json
import os
import requests
import multiprocessing
def list_files(directory):
for root, subdirs, files in os.walk(directory):
print("ROOT: {}".format(root))
for file in files:
yield os.path.abspath(os.path.join(root, file))
for subdir in subdirs:
for subfile in list_files(subdir):
yield subfile
if __name__ == "__main__":
_, base_url, access_token, listing_dir = sys.argv
endpoint = "{}/services".format(base_url)
print("Base URL: {}".format(base_url))
print("Access token: {}".format(access_token))
print("Listing dir: {}".format(listing_dir))
def put_file(file_path):
with open(file_path) as f:
data = json.load(f)
data = {'services': data}
url = '{}/{}'.format(endpoint, data['services']['id'])
response = requests.put(
url,
data=json.dumps(data),
headers={
"content-type": "application/json",
"authorization": "Bearer {}".format(access_token),
})
return response
pool = multiprocessing.Pool(10)
for result in pool.imap(put_file, list_files(listing_dir)):
print(result)
|
95b08a7cb2d473c25c1d326b0394336955b47af4 | appy/models.py | appy/models.py | from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
def __unicode__(self):
return self.description
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
def __unicode__(self):
return u'%s at %s' % (self.job_title, self.company)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| Add unicode representations for tags/positions | Add unicode representations for tags/positions
| Python | mit | merdey/ApPy,merdey/ApPy | from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
Add unicode representations for tags/positions | from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
def __unicode__(self):
return self.description
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
def __unicode__(self):
return u'%s at %s' % (self.job_title, self.company)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| <commit_before>from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
<commit_msg>Add unicode representations for tags/positions<commit_after> | from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
def __unicode__(self):
return self.description
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
def __unicode__(self):
return u'%s at %s' % (self.job_title, self.company)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
Add unicode representations for tags/positionsfrom django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
def __unicode__(self):
return self.description
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
def __unicode__(self):
return u'%s at %s' % (self.job_title, self.company)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| <commit_before>from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
<commit_msg>Add unicode representations for tags/positions<commit_after>from django.db import models
from django.contrib.auth.models import User
class Tag(models.Model):
description = models.TextField()
def __unicode__(self):
return self.description
class Position(models.Model):
company = models.TextField()
job_title = models.TextField()
description = models.TextField()
tags = models.ManyToManyField(Tag)
def __unicode__(self):
return u'%s at %s' % (self.job_title, self.company)
class Application(models.Model):
user = models.ForeignKey(User)
position = models.ForeignKey(Position)
APPLIED = 'APP'
REJECTED = 'REJ'
INTERVIEWING = 'INT'
NEGOTIATING = 'NEG'
STATUS_CHOICES = (
(APPLIED, 'Applied'),
(REJECTED, 'Rejected'),
(INTERVIEWING, 'Interviewing'),
(NEGOTIATING, 'Negotiating'),
)
status = models.CharField(max_length=3, choices=STATUS_CHOICES, default=APPLIED)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
|
2b43ab4eb41e305c5bdadf5c338e134e5569249d | tests/conftest.py | tests/conftest.py | # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
| # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
def session_teardown():
"""Tear down testing session"""
print('Tearing down testing session')
os.chdir(BASEDIR) # so html coverage report lands in correct directory
@pytest.fixture(scope='session', autouse=True)
def session_setup(request):
"""Set up testing session"""
print('Setting up testing session')
request.addfinalizer(session_teardown)
| Add session setup and teardown fixtures. | Add session setup and teardown fixtures. | Python | mit | bilderbuchi/ofStateManager | # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
Add session setup and teardown fixtures. | # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
def session_teardown():
"""Tear down testing session"""
print('Tearing down testing session')
os.chdir(BASEDIR) # so html coverage report lands in correct directory
@pytest.fixture(scope='session', autouse=True)
def session_setup(request):
"""Set up testing session"""
print('Setting up testing session')
request.addfinalizer(session_teardown)
| <commit_before># pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
<commit_msg>Add session setup and teardown fixtures.<commit_after> | # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
def session_teardown():
"""Tear down testing session"""
print('Tearing down testing session')
os.chdir(BASEDIR) # so html coverage report lands in correct directory
@pytest.fixture(scope='session', autouse=True)
def session_setup(request):
"""Set up testing session"""
print('Setting up testing session')
request.addfinalizer(session_teardown)
| # pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
Add session setup and teardown fixtures.# pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
def session_teardown():
"""Tear down testing session"""
print('Tearing down testing session')
os.chdir(BASEDIR) # so html coverage report lands in correct directory
@pytest.fixture(scope='session', autouse=True)
def session_setup(request):
"""Set up testing session"""
print('Setting up testing session')
request.addfinalizer(session_teardown)
| <commit_before># pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
<commit_msg>Add session setup and teardown fixtures.<commit_after># pylint: disable=C0111
import pytest
import os
import tarfile
BASEDIR = os.path.dirname(__file__)
@pytest.fixture(autouse=False)
def set_up(tmpdir):
# print BASEDIR
tmpdir.chdir()
tar = tarfile.open(os.path.join(BASEDIR, "MockRepos.tar.gz"))
tar.extractall()
tar.close()
os.chdir('MockRepos')
print('In directory ' + os.getcwd())
# does not need teardown, since tmpdir directories get autodeleted
def session_teardown():
"""Tear down testing session"""
print('Tearing down testing session')
os.chdir(BASEDIR) # so html coverage report lands in correct directory
@pytest.fixture(scope='session', autouse=True)
def session_setup(request):
"""Set up testing session"""
print('Setting up testing session')
request.addfinalizer(session_teardown)
|
98bded02b1b5116db640f5e58f73920108af0b0c | tests/test_set.py | tests/test_set.py | import matplotlib
import fishbowl
original = []
updated = [r'\usepackage{mathspec}',
r'\setallmainfonts(Digits,Latin,Greek){Arbitrary}']
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['pgf.preamble'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['pgf.preamble'] == original
| import matplotlib
import fishbowl
original = True
updated = False
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['axes.spines.left'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['axes.spines.left'] == original
| Update tests to remove pgf.preamble | Update tests to remove pgf.preamble
| Python | mit | baxen/fishbowl | import matplotlib
import fishbowl
original = []
updated = [r'\usepackage{mathspec}',
r'\setallmainfonts(Digits,Latin,Greek){Arbitrary}']
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['pgf.preamble'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['pgf.preamble'] == original
Update tests to remove pgf.preamble | import matplotlib
import fishbowl
original = True
updated = False
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['axes.spines.left'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['axes.spines.left'] == original
| <commit_before>import matplotlib
import fishbowl
original = []
updated = [r'\usepackage{mathspec}',
r'\setallmainfonts(Digits,Latin,Greek){Arbitrary}']
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['pgf.preamble'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['pgf.preamble'] == original
<commit_msg>Update tests to remove pgf.preamble<commit_after> | import matplotlib
import fishbowl
original = True
updated = False
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['axes.spines.left'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['axes.spines.left'] == original
| import matplotlib
import fishbowl
original = []
updated = [r'\usepackage{mathspec}',
r'\setallmainfonts(Digits,Latin,Greek){Arbitrary}']
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['pgf.preamble'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['pgf.preamble'] == original
Update tests to remove pgf.preambleimport matplotlib
import fishbowl
original = True
updated = False
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['axes.spines.left'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['axes.spines.left'] == original
| <commit_before>import matplotlib
import fishbowl
original = []
updated = [r'\usepackage{mathspec}',
r'\setallmainfonts(Digits,Latin,Greek){Arbitrary}']
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['pgf.preamble'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['pgf.preamble'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['pgf.preamble'] == original
<commit_msg>Update tests to remove pgf.preamble<commit_after>import matplotlib
import fishbowl
original = True
updated = False
def test_context_set():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_context_reset():
fishbowl.reset_style()
with fishbowl.style(axes='minimal', palette='gourami', font='Arbitrary'):
pass
assert matplotlib.rcParams['axes.spines.left'] == original
def test_set():
fishbowl.reset_style()
fishbowl.set_style(font='Arbitrary')
assert matplotlib.rcParams['axes.spines.left'] == updated
def test_reset():
fishbowl.set_style(font='Arbitrary')
fishbowl.reset_style()
assert matplotlib.rcParams['axes.spines.left'] == original
|
e6d965cc36d92ee8f138d487614244c6e3deda69 | run_tests.py | run_tests.py | from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
| from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
def pytest_warning():
print "=" * 60
print "WARNING: Skipping some tests\n"
print "To run all available tests you need to install pytest"
print "eg. with libtbx.python -m pip install pytest"
print "=" * 60
pytest_warning()
import atexit
atexit.register(pytest_warning)
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
| Add warning for skipped tests if pytest not available | Add warning for skipped tests if pytest not available
| Python | bsd-3-clause | xia2/i19 | from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
Add warning for skipped tests if pytest not available | from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
def pytest_warning():
print "=" * 60
print "WARNING: Skipping some tests\n"
print "To run all available tests you need to install pytest"
print "eg. with libtbx.python -m pip install pytest"
print "=" * 60
pytest_warning()
import atexit
atexit.register(pytest_warning)
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
| <commit_before>from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
<commit_msg>Add warning for skipped tests if pytest not available<commit_after> | from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
def pytest_warning():
print "=" * 60
print "WARNING: Skipping some tests\n"
print "To run all available tests you need to install pytest"
print "eg. with libtbx.python -m pip install pytest"
print "=" * 60
pytest_warning()
import atexit
atexit.register(pytest_warning)
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
| from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
Add warning for skipped tests if pytest not availablefrom __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
def pytest_warning():
print "=" * 60
print "WARNING: Skipping some tests\n"
print "To run all available tests you need to install pytest"
print "eg. with libtbx.python -m pip install pytest"
print "=" * 60
pytest_warning()
import atexit
atexit.register(pytest_warning)
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
| <commit_before>from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
<commit_msg>Add warning for skipped tests if pytest not available<commit_after>from __future__ import division
import libtbx.load_env
def discover_pytests(module):
try:
import os
import pytest
except ImportError:
def pytest_warning():
print "=" * 60
print "WARNING: Skipping some tests\n"
print "To run all available tests you need to install pytest"
print "eg. with libtbx.python -m pip install pytest"
print "=" * 60
pytest_warning()
import atexit
atexit.register(pytest_warning)
return []
if 'LIBTBX_SKIP_PYTEST' in os.environ:
return []
test_list = []
dist_dir = libtbx.env.dist_path(module)
class TestDiscoveryPlugin:
def pytest_itemcollected(self, item):
test_list.append([ "libtbx.python", "-m", "pytest", '--noconftest',
os.path.join(dist_dir, item.nodeid) ])
print "Discovering pytest tests:"
pytest.main(['-qq', '--collect-only', '--noconftest', dist_dir], plugins=[TestDiscoveryPlugin()])
return test_list
if (__name__ == "__main__"):
import unittest
test_suite = unittest.defaultTestLoader.discover(libtbx.env.dist_path("i19"), pattern="tst_*.py")
result = unittest.TextTestRunner().run(test_suite)
import sys
sys.exit(0 if result.wasSuccessful() else 1)
tst_list = [
# "$D/tests/tst_legacy.py",
["$D/tests/tst_legacy_mult.py", "1"]
# ["$D/tests/tst_legacy_mult.py", "2"]
] + discover_pytests("i19")
|
baedff75f2b86f09368e3bd72b72e27bf887cc88 | rotational-cipher/rotational_cipher.py | rotational-cipher/rotational_cipher.py | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| Use lambda function with method | Use lambda function with method
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with method | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| <commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after> | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with methodimport string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| <commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
939ebf2eb4536fd5a6318d6cc4b55a9dc4c8def2 | documentation/compile_documentation.py | documentation/compile_documentation.py | import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
for i, code_block in enumerate(code):
print_block = '// %s : %i\n%s\n' % (file_name, i, code_block)
destination.write(print_block)
# Main Program
root_dir = '.'
destination = open('documentation.lpg', 'w')
if len(sys.argv) == 2:
root_dir = sys.argv[1]
for root, dirs, files in os.walk(root_dir):
for fi in files:
if fi.endswith('md'):
code = process_file(os.path.join(root, fi))
write_to_file(fi, code)
destination.close()
| import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
with open(file_name, 'w') as f:
f.write(code)
def write_codeblocks_to_different_files(code_blocks):
for i, code_block in enumerate(code_blocks):
write_to_file('codeblock%i.lpg' % i, code_block)
# Main Program
root_dir = '.'
if len(sys.argv) == 2:
root_dir = sys.argv[1]
if not os.path.exists(root_dir):
raise Exception('Directory does not exist')
for root, dirs, files in os.walk(root_dir):
markdown_files = [f for f in files if f.endswith('md')]
for fi in markdown_files:
code = process_file(os.path.join(root, fi))
write_codeblocks_to_different_files(code)
| Split the documentation compilation into different files | Split the documentation compilation into different files
| Python | mit | TyRoXx/Lpg,TyRoXx/Lpg,TyRoXx/Lpg,mamazu/Lpg,mamazu/Lpg,mamazu/Lpg,TyRoXx/Lpg,mamazu/Lpg,TyRoXx/Lpg | import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
for i, code_block in enumerate(code):
print_block = '// %s : %i\n%s\n' % (file_name, i, code_block)
destination.write(print_block)
# Main Program
root_dir = '.'
destination = open('documentation.lpg', 'w')
if len(sys.argv) == 2:
root_dir = sys.argv[1]
for root, dirs, files in os.walk(root_dir):
for fi in files:
if fi.endswith('md'):
code = process_file(os.path.join(root, fi))
write_to_file(fi, code)
destination.close()
Split the documentation compilation into different files | import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
with open(file_name, 'w') as f:
f.write(code)
def write_codeblocks_to_different_files(code_blocks):
for i, code_block in enumerate(code_blocks):
write_to_file('codeblock%i.lpg' % i, code_block)
# Main Program
root_dir = '.'
if len(sys.argv) == 2:
root_dir = sys.argv[1]
if not os.path.exists(root_dir):
raise Exception('Directory does not exist')
for root, dirs, files in os.walk(root_dir):
markdown_files = [f for f in files if f.endswith('md')]
for fi in markdown_files:
code = process_file(os.path.join(root, fi))
write_codeblocks_to_different_files(code)
| <commit_before>import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
for i, code_block in enumerate(code):
print_block = '// %s : %i\n%s\n' % (file_name, i, code_block)
destination.write(print_block)
# Main Program
root_dir = '.'
destination = open('documentation.lpg', 'w')
if len(sys.argv) == 2:
root_dir = sys.argv[1]
for root, dirs, files in os.walk(root_dir):
for fi in files:
if fi.endswith('md'):
code = process_file(os.path.join(root, fi))
write_to_file(fi, code)
destination.close()
<commit_msg>Split the documentation compilation into different files<commit_after> | import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
with open(file_name, 'w') as f:
f.write(code)
def write_codeblocks_to_different_files(code_blocks):
for i, code_block in enumerate(code_blocks):
write_to_file('codeblock%i.lpg' % i, code_block)
# Main Program
root_dir = '.'
if len(sys.argv) == 2:
root_dir = sys.argv[1]
if not os.path.exists(root_dir):
raise Exception('Directory does not exist')
for root, dirs, files in os.walk(root_dir):
markdown_files = [f for f in files if f.endswith('md')]
for fi in markdown_files:
code = process_file(os.path.join(root, fi))
write_codeblocks_to_different_files(code)
| import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
for i, code_block in enumerate(code):
print_block = '// %s : %i\n%s\n' % (file_name, i, code_block)
destination.write(print_block)
# Main Program
root_dir = '.'
destination = open('documentation.lpg', 'w')
if len(sys.argv) == 2:
root_dir = sys.argv[1]
for root, dirs, files in os.walk(root_dir):
for fi in files:
if fi.endswith('md'):
code = process_file(os.path.join(root, fi))
write_to_file(fi, code)
destination.close()
Split the documentation compilation into different filesimport sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
with open(file_name, 'w') as f:
f.write(code)
def write_codeblocks_to_different_files(code_blocks):
for i, code_block in enumerate(code_blocks):
write_to_file('codeblock%i.lpg' % i, code_block)
# Main Program
root_dir = '.'
if len(sys.argv) == 2:
root_dir = sys.argv[1]
if not os.path.exists(root_dir):
raise Exception('Directory does not exist')
for root, dirs, files in os.walk(root_dir):
markdown_files = [f for f in files if f.endswith('md')]
for fi in markdown_files:
code = process_file(os.path.join(root, fi))
write_codeblocks_to_different_files(code)
| <commit_before>import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
for i, code_block in enumerate(code):
print_block = '// %s : %i\n%s\n' % (file_name, i, code_block)
destination.write(print_block)
# Main Program
root_dir = '.'
destination = open('documentation.lpg', 'w')
if len(sys.argv) == 2:
root_dir = sys.argv[1]
for root, dirs, files in os.walk(root_dir):
for fi in files:
if fi.endswith('md'):
code = process_file(os.path.join(root, fi))
write_to_file(fi, code)
destination.close()
<commit_msg>Split the documentation compilation into different files<commit_after>import sys
import os
def find_code(text):
START_TAG = '```lpg'
END_TAG = '```'
first_index = text.find(START_TAG)
if first_index == -1:
return None, None
last_index = text.find(END_TAG, first_index + 1)
return first_index + len(START_TAG), last_index
def process_file(path):
content = open(path, 'r').read()
left, right = find_code(content)
code_pieces = []
while left is not None:
code_pieces.append(content[left:right].strip())
content = content[right:]
left, right = find_code(content)
return code_pieces
def write_to_file(file_name, code):
with open(file_name, 'w') as f:
f.write(code)
def write_codeblocks_to_different_files(code_blocks):
for i, code_block in enumerate(code_blocks):
write_to_file('codeblock%i.lpg' % i, code_block)
# Main Program
root_dir = '.'
if len(sys.argv) == 2:
root_dir = sys.argv[1]
if not os.path.exists(root_dir):
raise Exception('Directory does not exist')
for root, dirs, files in os.walk(root_dir):
markdown_files = [f for f in files if f.endswith('md')]
for fi in markdown_files:
code = process_file(os.path.join(root, fi))
write_codeblocks_to_different_files(code)
|
c7fd327623dfa84a91931771265932d4da95d766 | 001-xoxoxo-obj/harness.py | 001-xoxoxo-obj/harness.py | from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is None:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
continue
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
| from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is False:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
break
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
| Fix nierozpoznawania bledu gracza w pierwszym ruchu | Fix nierozpoznawania bledu gracza w pierwszym ruchu
| Python | mit | gynvael/stream,gynvael/stream,gynvael/stream,gynvael/stream,gynvael/stream,gynvael/stream | from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is None:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
continue
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
Fix nierozpoznawania bledu gracza w pierwszym ruchu | from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is False:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
break
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
| <commit_before>from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is None:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
continue
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
<commit_msg>Fix nierozpoznawania bledu gracza w pierwszym ruchu<commit_after> | from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is False:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
break
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
| from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is None:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
continue
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
Fix nierozpoznawania bledu gracza w pierwszym ruchufrom game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is False:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
break
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
| <commit_before>from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is None:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
continue
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
<commit_msg>Fix nierozpoznawania bledu gracza w pierwszym ruchu<commit_after>from game import Game
from input_con import InputCon
from output_con import OutputCon
class Harness():
def __init__(self, output, inputs):
self._game = Game()
self._output = output
self._inputs = inputs
def Start(self):
self._output.show_welcome()
while True:
self._output.show_board(self._game.get_board())
player_id = self._game.get_turn_no()
player = self._game.get_turn()
self._output.show_player_turn(player)
while True:
move = self._inputs[player_id].get_move()
if move is False:
self._output.show_move_error(player)
continue
if self._game.make_move(move) is False:
self._output.show_move_error(player)
break
break
if not self._game.get_end():
continue
# End of game.
self._output.show_board(self._game.get_board())
w = self._game.get_winner()
if w is None:
# Draw.
self._output.show_draw()
else:
self._output.show_winner(w)
break
def main():
inputcon1 = InputCon()
inputcon2 = InputCon()
outputcon = OutputCon()
player_inputs = [ inputcon1, inputcon2 ]
player_output = outputcon
h = Harness(player_output, player_inputs)
h.Start()
if __name__ == "__main__":
main()
|
9a34e663f9ef65cdac91c42dcf198ae28d4385be | txircd/modbase.py | txircd/modbase.py | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
} | Add a function for commands to process parameters | Add a function for commands to process parameters
| Python | bsd-3-clause | ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
passAdd a function for commands to process parameters | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
} | <commit_before># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass<commit_msg>Add a function for commands to process parameters<commit_after> | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
} | # The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
passAdd a function for commands to process parameters# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
} | <commit_before># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass<commit_msg>Add a function for commands to process parameters<commit_after># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
} |
a1cc1c08e9ff36ff0293f0224ca0acb41f065073 | alg_selection_sort.py | alg_selection_sort.py | def selection_sort(a_list):
"""Selection Sort algortihm.
Concept: Find out the maximun item's original slot first,
then switch it to the max slot. Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
| def selection_sort(a_list):
"""Selection Sort algortihm.
Concept:
- Find out the maximun item's original slot first,
- then swap it and the item at the max slot.
- Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
| Revise doc string for selection sort’s concept | Revise doc string for selection sort’s concept
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | def selection_sort(a_list):
"""Selection Sort algortihm.
Concept: Find out the maximun item's original slot first,
then switch it to the max slot. Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
Revise doc string for selection sort’s concept | def selection_sort(a_list):
"""Selection Sort algortihm.
Concept:
- Find out the maximun item's original slot first,
- then swap it and the item at the max slot.
- Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
| <commit_before>def selection_sort(a_list):
"""Selection Sort algortihm.
Concept: Find out the maximun item's original slot first,
then switch it to the max slot. Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
<commit_msg>Revise doc string for selection sort’s concept<commit_after> | def selection_sort(a_list):
"""Selection Sort algortihm.
Concept:
- Find out the maximun item's original slot first,
- then swap it and the item at the max slot.
- Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
| def selection_sort(a_list):
"""Selection Sort algortihm.
Concept: Find out the maximun item's original slot first,
then switch it to the max slot. Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
Revise doc string for selection sort’s conceptdef selection_sort(a_list):
"""Selection Sort algortihm.
Concept:
- Find out the maximun item's original slot first,
- then swap it and the item at the max slot.
- Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
| <commit_before>def selection_sort(a_list):
"""Selection Sort algortihm.
Concept: Find out the maximun item's original slot first,
then switch it to the max slot. Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
<commit_msg>Revise doc string for selection sort’s concept<commit_after>def selection_sort(a_list):
"""Selection Sort algortihm.
Concept:
- Find out the maximun item's original slot first,
- then swap it and the item at the max slot.
- Iterate the procedure.
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
temp = a_list[max_slot]
a_list[max_slot] = a_list[select_slot]
a_list[select_slot] = temp
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
|
1be7237169f0c0920d4891d7fb20b03327037cdb | osgtest/tests/test_40_proxy.py | osgtest/tests/test_40_proxy.py | import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import unittest
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
| import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import pprint
import errno
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_00_list_proxies(self):
command = ('ls', '-lF', '/tmp/x509up_u*')
status, stdout, _ = core.system(command, 'List proxies')
core.log_message(stdout)
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
try:
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
except OSError, e:
attributes = {}
for x in dir(e):
attributes[x] = getattr(e,x, None)
pprint.pprint(attributes)
raise
def test_02_grid_proxy_info(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-info', '-debug')
core.check_system(command, 'Run grid-proxy-info', user=True)
| Add debugging code to proxy generation | Add debugging code to proxy generation
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18806 4e558342-562e-0410-864c-e07659590f8c
| Python | apache-2.0 | efajardo/osg-test,efajardo/osg-test | import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import unittest
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
Add debugging code to proxy generation
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18806 4e558342-562e-0410-864c-e07659590f8c | import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import pprint
import errno
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_00_list_proxies(self):
command = ('ls', '-lF', '/tmp/x509up_u*')
status, stdout, _ = core.system(command, 'List proxies')
core.log_message(stdout)
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
try:
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
except OSError, e:
attributes = {}
for x in dir(e):
attributes[x] = getattr(e,x, None)
pprint.pprint(attributes)
raise
def test_02_grid_proxy_info(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-info', '-debug')
core.check_system(command, 'Run grid-proxy-info', user=True)
| <commit_before>import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import unittest
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
<commit_msg>Add debugging code to proxy generation
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18806 4e558342-562e-0410-864c-e07659590f8c<commit_after> | import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import pprint
import errno
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_00_list_proxies(self):
command = ('ls', '-lF', '/tmp/x509up_u*')
status, stdout, _ = core.system(command, 'List proxies')
core.log_message(stdout)
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
try:
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
except OSError, e:
attributes = {}
for x in dir(e):
attributes[x] = getattr(e,x, None)
pprint.pprint(attributes)
raise
def test_02_grid_proxy_info(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-info', '-debug')
core.check_system(command, 'Run grid-proxy-info', user=True)
| import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import unittest
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
Add debugging code to proxy generation
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18806 4e558342-562e-0410-864c-e07659590f8cimport osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import pprint
import errno
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_00_list_proxies(self):
command = ('ls', '-lF', '/tmp/x509up_u*')
status, stdout, _ = core.system(command, 'List proxies')
core.log_message(stdout)
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
try:
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
except OSError, e:
attributes = {}
for x in dir(e):
attributes[x] = getattr(e,x, None)
pprint.pprint(attributes)
raise
def test_02_grid_proxy_info(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-info', '-debug')
core.check_system(command, 'Run grid-proxy-info', user=True)
| <commit_before>import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import unittest
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
<commit_msg>Add debugging code to proxy generation
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18806 4e558342-562e-0410-864c-e07659590f8c<commit_after>import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
import pprint
import errno
class TestGridProxyInit(osgunittest.OSGTestCase):
def test_00_list_proxies(self):
command = ('ls', '-lF', '/tmp/x509up_u*')
status, stdout, _ = core.system(command, 'List proxies')
core.log_message(stdout)
def test_01_grid_proxy_init(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-init', '-debug')
password = core.options.password + '\n'
try:
core.check_system(command, 'Run grid-proxy-init', user=True,
stdin=password)
except OSError, e:
attributes = {}
for x in dir(e):
attributes[x] = getattr(e,x, None)
pprint.pprint(attributes)
raise
def test_02_grid_proxy_info(self):
core.skip_ok_unless_installed('globus-proxy-utils')
command = ('grid-proxy-info', '-debug')
core.check_system(command, 'Run grid-proxy-info', user=True)
|
a30e51ccb74bc55924be6f7f79dc4b6038c9b457 | altair/examples/bar_chart_with_highlighted_segment.py | altair/examples/bar_chart_with_highlighted_segment.py | """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
| """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
| Move bar chart with highlighted segment chart into the bar charts section | Move bar chart with highlighted segment chart into the bar charts section | Python | bsd-3-clause | altair-viz/altair | """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
Move bar chart with highlighted segment chart into the bar charts section | """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
| <commit_before>"""
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
<commit_msg>Move bar chart with highlighted segment chart into the bar charts section<commit_after> | """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
| """
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
Move bar chart with highlighted segment chart into the bar charts section"""
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
| <commit_before>"""
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
<commit_msg>Move bar chart with highlighted segment chart into the bar charts section<commit_after>"""
Bar Chart with Highlighted Segment
----------------------------------
This example shows a bar chart that highlights values beyond a threshold.
"""
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
09340916e7db6ba8ccb5697b9444fbccc0512103 | example/example/views.py | example/example/views.py | from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
| from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm, can_delete=True)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
| Add `can_delete=True` to the example formset | Add `can_delete=True` to the example formset
| Python | bsd-2-clause | pretix/django-formset-js,pretix/django-formset-js,pretix/django-formset-js | from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
Add `can_delete=True` to the example formset | from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm, can_delete=True)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
| <commit_before>from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
<commit_msg>Add `can_delete=True` to the example formset<commit_after> | from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm, can_delete=True)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
| from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
Add `can_delete=True` to the example formsetfrom django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm, can_delete=True)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
| <commit_before>from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
<commit_msg>Add `can_delete=True` to the example formset<commit_after>from django import forms
from django.forms.formsets import formset_factory
from django.shortcuts import render
from djangoformsetjs.utils import formset_media_js
class MyForm(forms.Form):
foo = forms.CharField()
class Media(object):
# The form must have `formset_media_js` in its Media
js = formset_media_js + (
# Other form javascript...
)
MyFormSet = formset_factory(MyForm, can_delete=True)
def formset_view(request):
formset = MyFormSet(request.POST or None)
if formset.is_valid():
pass
return render(request, 'formset.html', {'formset': formset})
|
de11b473a6134ed1403e91d55d30c23e6683a926 | test/runner/versions.py | test/runner/versions.py | #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print('.'.join(u'%s' % i for i in sys.version_info))
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
| #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print(sys.version)
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
| Revert "Relax ansible-test python version checking." | Revert "Relax ansible-test python version checking."
This reverts commit d6cc3c41874b64e346639549fd18d8c41be0db8b.
| Python | mit | thaim/ansible,thaim/ansible | #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print('.'.join(u'%s' % i for i in sys.version_info))
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
Revert "Relax ansible-test python version checking."
This reverts commit d6cc3c41874b64e346639549fd18d8c41be0db8b. | #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print(sys.version)
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
| <commit_before>#!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print('.'.join(u'%s' % i for i in sys.version_info))
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
<commit_msg>Revert "Relax ansible-test python version checking."
This reverts commit d6cc3c41874b64e346639549fd18d8c41be0db8b.<commit_after> | #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print(sys.version)
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
| #!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print('.'.join(u'%s' % i for i in sys.version_info))
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
Revert "Relax ansible-test python version checking."
This reverts commit d6cc3c41874b64e346639549fd18d8c41be0db8b.#!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print(sys.version)
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
| <commit_before>#!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print('.'.join(u'%s' % i for i in sys.version_info))
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
<commit_msg>Revert "Relax ansible-test python version checking."
This reverts commit d6cc3c41874b64e346639549fd18d8c41be0db8b.<commit_after>#!/usr/bin/env python
"""Show python and pip versions."""
import os
import sys
try:
import pip
except ImportError:
pip = None
print(sys.version)
if pip:
print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
|
c3ead540e7008ba1a3d01df695620bc952564805 | sphinx/fabfile.py | sphinx/fabfile.py | from fabric.api import run, env, roles
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify version number: fab latest:x.x.x")
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
| from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
elif v == "latest":
raise RuntimeError("You can not pass 'latest' as fab argument. Use "
"fab latest:x.x.x instead.")
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify a version number: fab latest:x.x.x")
if exists("/www/bokeh/en/%s" % v):
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
else:
raise RuntimeError("We did not detect a %s docs version, please use "
"fab deploy:%s first." % v)
| Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink. | Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink.
| Python | bsd-3-clause | Karel-van-de-Plassche/bokeh,aavanian/bokeh,clairetang6/bokeh,matbra/bokeh,quasiben/bokeh,justacec/bokeh,schoolie/bokeh,rothnic/bokeh,evidation-health/bokeh,ptitjano/bokeh,muku42/bokeh,percyfal/bokeh,daodaoliang/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,CrazyGuo/bokeh,draperjames/bokeh,mindriot101/bokeh,percyfal/bokeh,caseyclements/bokeh,muku42/bokeh,rothnic/bokeh,phobson/bokeh,aiguofer/bokeh,daodaoliang/bokeh,timsnyder/bokeh,draperjames/bokeh,paultcochrane/bokeh,saifrahmed/bokeh,evidation-health/bokeh,stonebig/bokeh,paultcochrane/bokeh,ericmjl/bokeh,roxyboy/bokeh,phobson/bokeh,azjps/bokeh,timsnyder/bokeh,dennisobrien/bokeh,maxalbert/bokeh,timsnyder/bokeh,phobson/bokeh,khkaminska/bokeh,deeplook/bokeh,bokeh/bokeh,ericdill/bokeh,maxalbert/bokeh,schoolie/bokeh,xguse/bokeh,aiguofer/bokeh,ptitjano/bokeh,deeplook/bokeh,KasperPRasmussen/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,carlvlewis/bokeh,justacec/bokeh,aavanian/bokeh,msarahan/bokeh,dennisobrien/bokeh,philippjfr/bokeh,muku42/bokeh,stonebig/bokeh,roxyboy/bokeh,srinathv/bokeh,caseyclements/bokeh,tacaswell/bokeh,ChinaQuants/bokeh,mindriot101/bokeh,josherick/bokeh,paultcochrane/bokeh,ericdill/bokeh,xguse/bokeh,percyfal/bokeh,DuCorey/bokeh,azjps/bokeh,bokeh/bokeh,jakirkham/bokeh,rs2/bokeh,xguse/bokeh,aavanian/bokeh,gpfreitas/bokeh,timsnyder/bokeh,philippjfr/bokeh,rs2/bokeh,phobson/bokeh,schoolie/bokeh,maxalbert/bokeh,draperjames/bokeh,tacaswell/bokeh,aiguofer/bokeh,msarahan/bokeh,aavanian/bokeh,aiguofer/bokeh,ericdill/bokeh,jakirkham/bokeh,maxalbert/bokeh,srinathv/bokeh,htygithub/bokeh,dennisobrien/bokeh,daodaoliang/bokeh,deeplook/bokeh,roxyboy/bokeh,josherick/bokeh,gpfreitas/bokeh,ptitjano/bokeh,clairetang6/bokeh,quasiben/bokeh,jplourenco/bokeh,evidation-health/bokeh,rs2/bokeh,clairetang6/bokeh,CrazyGuo/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,KasperPRasmussen/bokeh,gpfreitas/bokeh,rothnic/bokeh,khkaminska/bokeh,muku42/bokeh,ericdill/bokeh,rothnic/bokeh,msarahan/bokeh,jplourenco/bokeh,C
hinaQuants/bokeh,draperjames/bokeh,caseyclements/bokeh,daodaoliang/bokeh,jplourenco/bokeh,rs2/bokeh,philippjfr/bokeh,ptitjano/bokeh,saifrahmed/bokeh,stonebig/bokeh,evidation-health/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,bokeh/bokeh,CrazyGuo/bokeh,paultcochrane/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,philippjfr/bokeh,matbra/bokeh,schoolie/bokeh,htygithub/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,percyfal/bokeh,KasperPRasmussen/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,khkaminska/bokeh,ericmjl/bokeh,jakirkham/bokeh,tacaswell/bokeh,saifrahmed/bokeh,mindriot101/bokeh,draperjames/bokeh,gpfreitas/bokeh,DuCorey/bokeh,stonebig/bokeh,jplourenco/bokeh,azjps/bokeh,justacec/bokeh,josherick/bokeh,roxyboy/bokeh,matbra/bokeh,mindriot101/bokeh,dennisobrien/bokeh,jakirkham/bokeh,DuCorey/bokeh,deeplook/bokeh,ChinaQuants/bokeh,philippjfr/bokeh,aavanian/bokeh,carlvlewis/bokeh,saifrahmed/bokeh,rs2/bokeh,carlvlewis/bokeh,xguse/bokeh,matbra/bokeh,ptitjano/bokeh,srinathv/bokeh,bokeh/bokeh,percyfal/bokeh,tacaswell/bokeh,azjps/bokeh,jakirkham/bokeh,khkaminska/bokeh,caseyclements/bokeh,phobson/bokeh,quasiben/bokeh,srinathv/bokeh,carlvlewis/bokeh,CrazyGuo/bokeh,ericmjl/bokeh,msarahan/bokeh,clairetang6/bokeh,dennisobrien/bokeh,htygithub/bokeh,josherick/bokeh,ericmjl/bokeh,azjps/bokeh | from fabric.api import run, env, roles
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify version number: fab latest:x.x.x")
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink. | from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
elif v == "latest":
raise RuntimeError("You can not pass 'latest' as fab argument. Use "
"fab latest:x.x.x instead.")
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify a version number: fab latest:x.x.x")
if exists("/www/bokeh/en/%s" % v):
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
else:
raise RuntimeError("We did not detect a %s docs version, please use "
"fab deploy:%s first." % v)
| <commit_before>from fabric.api import run, env, roles
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify version number: fab latest:x.x.x")
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
<commit_msg>Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink.<commit_after> | from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
elif v == "latest":
raise RuntimeError("You can not pass 'latest' as fab argument. Use "
"fab latest:x.x.x instead.")
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify a version number: fab latest:x.x.x")
if exists("/www/bokeh/en/%s" % v):
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
else:
raise RuntimeError("We did not detect a %s docs version, please use "
"fab deploy:%s first." % v)
| from fabric.api import run, env, roles
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify version number: fab latest:x.x.x")
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink.from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
elif v == "latest":
raise RuntimeError("You can not pass 'latest' as fab argument. Use "
"fab latest:x.x.x instead.")
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify a version number: fab latest:x.x.x")
if exists("/www/bokeh/en/%s" % v):
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
else:
raise RuntimeError("We did not detect a %s docs version, please use "
"fab deploy:%s first." % v)
| <commit_before>from fabric.api import run, env, roles
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify version number: fab latest:x.x.x")
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
<commit_msg>Add error if latest is passed to deploy. Also check if the path exist before meaking the symlink.<commit_after>from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
import sys
sys.path.append("source")
import conf
env.roledefs = {
'web': ['bokeh.pydata.org']
}
env.user = "bokeh"
@roles('web')
def deploy(v=None):
if v is None:
v = conf.version
elif v == "latest":
raise RuntimeError("You can not pass 'latest' as fab argument. Use "
"fab latest:x.x.x instead.")
# make a backup of the old directory
run("rm -rf /www/bokeh/en/%s.bak" % v)
run("mkdir -p /www/bokeh/en/%s" % v)
run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))
rsync_project(
local_dir="_build/html/",
remote_dir="/www/bokeh/en/%s" % v,
delete=True
)
# set permissions
run("chmod -R g+w /www/bokeh/en/%s" % v)
@roles('web')
def latest(v=None):
if v is None:
raise RuntimeError("You need to specify a version number: fab latest:x.x.x")
if exists("/www/bokeh/en/%s" % v):
# switch the current symlink to new docs
run("rm /www/bokeh/en/latest")
run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
else:
raise RuntimeError("We did not detect a %s docs version, please use "
"fab deploy:%s first." % v)
|
cb28bba6ee642828df473383ea469a6aa46ca59c | skimage/util/unique.py | skimage/util/unique.py | import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
| import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
| Add note describing array copy if discontiguous | Add note describing array copy if discontiguous
| Python | bsd-3-clause | paalge/scikit-image,Midafi/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,juliusbierk/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,keflavich/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,juliusbierk/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,newville/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,Midafi/scikit-image,rjeli/scikit-image,newville/scikit-image,ClinicalGraphics/scikit-image,SamHames/scikit-image,paalge/scikit-image,michaelaye/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,oew1v07/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,keflavich/scikit-image,SamHames/scikit-image,almarklein/scikit-image,youprofit/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,blink1073/scikit-image,almarklein/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,robintw/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,pratapvardhan/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,jwiggins/scikit-image,Hiyorimi/scikit-image,bsipocz/scikit-image | import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
Add note describing array copy if discontiguous | import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
| <commit_before>import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
<commit_msg>Add note describing array copy if discontiguous<commit_after> | import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
| import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
Add note describing array copy if discontiguousimport numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
| <commit_before>import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
<commit_msg>Add note describing array copy if discontiguous<commit_after>import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
|
88ec3066a191f22b1faa3429ae89cbd76d45ac9b | aore/config/dev.py | aore/config/dev.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "127.0.0.1:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "localhost"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "192.168.0.37:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "192.168.0.37"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
| Test global ip with Sphinx | Test global ip with Sphinx
| Python | bsd-3-clause | jar3b/py-phias,jar3b/py-phias,jar3b/py-phias,jar3b/py-phias | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "127.0.0.1:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "localhost"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
Test global ip with Sphinx | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "192.168.0.37:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "192.168.0.37"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "127.0.0.1:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "localhost"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
<commit_msg>Test global ip with Sphinx<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "192.168.0.37:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "192.168.0.37"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "127.0.0.1:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "localhost"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
Test global ip with Sphinx# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "192.168.0.37:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "192.168.0.37"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "127.0.0.1:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "localhost"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
<commit_msg>Test global ip with Sphinx<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
from .common import *
sphinx_conf.listen = "192.168.0.37:9312"
sphinx_conf.var_dir = "C:\\Sphinx"
db_conf.database = "postgres"
db_conf.host = "192.168.0.37"
db_conf.port = 5432
db_conf.user = "postgres"
db_conf.password = "intercon"
unrar_config.path = "C:\\Program Files (x86)\\WinRAR\\unrar.exe"
folders.temp = "E:\\!TEMP"
basic.logging = True
|
f60b940205a5e1011ce1c9c5672cb262c4649c0b | app/mod_auth/forms.py | app/mod_auth/forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
| from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
| Validate username and password on signup | Validate username and password on signup
| Python | mit | ziel980/website,ziel980/website | from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
Validate username and password on signup | from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
| <commit_before>from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
<commit_msg>Validate username and password on signup<commit_after> | from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
| from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
Validate username and password on signupfrom flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
| <commit_before>from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
<commit_msg>Validate username and password on signup<commit_after>from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
|
3d38af257f55a0252cb41408a404faa66b30d512 | pyconde/speakers/models.py | pyconde/speakers/models.py | from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
@receiver(post_save, sender=User)
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
| Create a speaker profile for each user that is registered. | Create a speaker profile for each user that is registered.
| Python | bsd-3-clause | pysv/djep,EuroPython/djep,pysv/djep,EuroPython/djep,pysv/djep,EuroPython/djep,pysv/djep,EuroPython/djep,pysv/djep | from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
Create a speaker profile for each user that is registered. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
@receiver(post_save, sender=User)
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
| <commit_before>from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
<commit_msg>Create a speaker profile for each user that is registered.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
@receiver(post_save, sender=User)
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
| from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
Create a speaker profile for each user that is registered.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
@receiver(post_save, sender=User)
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
| <commit_before>from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
<commit_msg>Create a speaker profile for each user that is registered.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Speaker(models.Model):
"""
The speaker model acts as user-abstraction for various session and proposal
related objects.
"""
user = models.OneToOneField(User, related_name='speaker_profile')
def __unicode__(self):
if self.user.first_name and self.user.last_name:
return u"{0} {1}".format(self.user.first_name, self.user.last_name)
return self.user.username
def get_absolute_url(self):
return reverse('account_profile', kwargs={'uid': self.user.id})
@receiver(post_save, sender=User)
def create_speaker_profile(sender, instance, created, raw, **kwargs):
"""
Every user also is a potential speaker in the current implemention so we
also have to create a new speaker object for every newly created user
instance.
"""
if created:
Speaker(user=instance).save()
|
579286426cf20c5cd5d4c94d97fd0a55eb571f8f | talks_keeper/templatetags/tk_filters.py | talks_keeper/templatetags/tk_filters.py | from django import template
from django.forms import CheckboxInput
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
| from django import template
from django.forms import CheckboxInput
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
@register.filter(name='in_group')
def in_group(user, group_name):
group = Group.objects.get(name=group_name)
return True if group in user.groups.all() else False
| Add templatetag to check if user included in group | Add templatetag to check if user included in group
| Python | mit | samitnuk/talks_keeper,samitnuk/talks_keeper,samitnuk/talks_keeper | from django import template
from django.forms import CheckboxInput
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
Add templatetag to check if user included in group | from django import template
from django.forms import CheckboxInput
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
@register.filter(name='in_group')
def in_group(user, group_name):
group = Group.objects.get(name=group_name)
return True if group in user.groups.all() else False
| <commit_before>from django import template
from django.forms import CheckboxInput
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
<commit_msg>Add templatetag to check if user included in group<commit_after> | from django import template
from django.forms import CheckboxInput
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
@register.filter(name='in_group')
def in_group(user, group_name):
group = Group.objects.get(name=group_name)
return True if group in user.groups.all() else False
| from django import template
from django.forms import CheckboxInput
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
Add templatetag to check if user included in groupfrom django import template
from django.forms import CheckboxInput
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
@register.filter(name='in_group')
def in_group(user, group_name):
group = Group.objects.get(name=group_name)
return True if group in user.groups.all() else False
| <commit_before>from django import template
from django.forms import CheckboxInput
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
<commit_msg>Add templatetag to check if user included in group<commit_after>from django import template
from django.forms import CheckboxInput
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='addclass')
def addclass(value, arg):
return value.as_widget(attrs={'class': arg})
@register.filter(name='is_checkbox')
def is_checkbox(field):
return field.field.widget.__class__.__name__ == \
CheckboxInput().__class__.__name__
@register.filter(name='in_group')
def in_group(user, group_name):
group = Group.objects.get(name=group_name)
return True if group in user.groups.all() else False
|
d52c9731b0c6494e9f4181fc33f00cdf39adb3ca | tests/unit/test_util.py | tests/unit/test_util.py | import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
| import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
@pytest.has_internet
def test_emergency_compliment():
assert util.load_emergency_compliments()
| Add test for emergency compliments | Add test for emergency compliments
| Python | mit | yougov/pmxbot,yougov/pmxbot,yougov/pmxbot | import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
Add test for emergency compliments | import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
@pytest.has_internet
def test_emergency_compliment():
assert util.load_emergency_compliments()
| <commit_before>import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
<commit_msg>Add test for emergency compliments<commit_after> | import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
@pytest.has_internet
def test_emergency_compliment():
assert util.load_emergency_compliments()
| import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
Add test for emergency complimentsimport pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
@pytest.has_internet
def test_emergency_compliment():
assert util.load_emergency_compliments()
| <commit_before>import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
<commit_msg>Add test for emergency compliments<commit_after>import pytest
from pmxbot import util
@pytest.has_wordnik
def test_lookup():
assert util.lookup('dachshund') is not None
@pytest.has_internet
def test_emergency_compliment():
assert util.load_emergency_compliments()
|
5b8bcdd802858baae3854fbfb8758dc65bdd8d34 | hardware/unicorn_hat_hd/demo_lights.py | hardware/unicorn_hat_hd/demo_lights.py | #!/usr/bin/env python
import unicornhathd
import os
try:
unicornhathd.set_pixel(0, 0, 255, 255, 255)
unicornhathd.set_pixel(15, 0, 255, 255, 255)
unicornhathd.set_pixel(0, 15, 255, 255, 255)
unicornhathd.set_pixel(15, 15, 255, 255, 255)
unicornhathd.show()
raw_input("Press the <ENTER> key or <CTRL+C> to exit...")
except KeyboardInterrupt:
pass
unicornhathd.off()
| #!/usr/bin/env python
import unicornhathd
import os
from time import sleep
class Point:
def __init__(self, x, y, dx, dy):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
def turn_on(self):
unicornhathd.set_pixel(self.x, self.y, 255, 255, 255)
def turn_off(self):
unicornhathd.set_pixel(self.x, self.y, 0, 0, 0)
def move(self):
self.turn_off()
self.x, self.dx = self.move_one_axis(self.x, self.dx)
self.y, self.dy = self.move_one_axis(self.y, self.dy)
self.turn_on()
def move_one_axis(self, x_or_y, dx_or_dy):
x_or_y += dx_or_dy
if x_or_y < 0 or x_or_y > 15:
dx_or_dy = dx_or_dy * -1
x_or_y += dx_or_dy
return x_or_y, dx_or_dy
print("Press <CTRL+C> to exit...")
unicornhathd.off()
# Bounce backwards and forwards along each edge:
p1 = Point(0, 0, 0, 1)
p2 = Point(0, 15, 1, 0)
p3 = Point(15, 0, -1, 0)
p4 = Point(15, 15, 0, -1)
p1.turn_on()
p2.turn_on()
p3.turn_on()
p4.turn_on()
try:
while True:
p1.move()
p2.move()
p3.move()
p4.move()
unicornhathd.show()
sleep(0.1)
except KeyboardInterrupt:
pass
unicornhathd.off()
| Make the 4 lights bounce backwards and forwards along each edge. | Make the 4 lights bounce backwards and forwards along each edge.
| Python | mit | claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code | #!/usr/bin/env python
import unicornhathd
import os
try:
unicornhathd.set_pixel(0, 0, 255, 255, 255)
unicornhathd.set_pixel(15, 0, 255, 255, 255)
unicornhathd.set_pixel(0, 15, 255, 255, 255)
unicornhathd.set_pixel(15, 15, 255, 255, 255)
unicornhathd.show()
raw_input("Press the <ENTER> key or <CTRL+C> to exit...")
except KeyboardInterrupt:
pass
unicornhathd.off()
Make the 4 lights bounce backwards and forwards along each edge. | #!/usr/bin/env python
import unicornhathd
import os
from time import sleep
class Point:
def __init__(self, x, y, dx, dy):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
def turn_on(self):
unicornhathd.set_pixel(self.x, self.y, 255, 255, 255)
def turn_off(self):
unicornhathd.set_pixel(self.x, self.y, 0, 0, 0)
def move(self):
self.turn_off()
self.x, self.dx = self.move_one_axis(self.x, self.dx)
self.y, self.dy = self.move_one_axis(self.y, self.dy)
self.turn_on()
def move_one_axis(self, x_or_y, dx_or_dy):
x_or_y += dx_or_dy
if x_or_y < 0 or x_or_y > 15:
dx_or_dy = dx_or_dy * -1
x_or_y += dx_or_dy
return x_or_y, dx_or_dy
print("Press <CTRL+C> to exit...")
unicornhathd.off()
# Bounce backwards and forwards along each edge:
p1 = Point(0, 0, 0, 1)
p2 = Point(0, 15, 1, 0)
p3 = Point(15, 0, -1, 0)
p4 = Point(15, 15, 0, -1)
p1.turn_on()
p2.turn_on()
p3.turn_on()
p4.turn_on()
try:
while True:
p1.move()
p2.move()
p3.move()
p4.move()
unicornhathd.show()
sleep(0.1)
except KeyboardInterrupt:
pass
unicornhathd.off()
| <commit_before>#!/usr/bin/env python
import unicornhathd
import os
try:
unicornhathd.set_pixel(0, 0, 255, 255, 255)
unicornhathd.set_pixel(15, 0, 255, 255, 255)
unicornhathd.set_pixel(0, 15, 255, 255, 255)
unicornhathd.set_pixel(15, 15, 255, 255, 255)
unicornhathd.show()
raw_input("Press the <ENTER> key or <CTRL+C> to exit...")
except KeyboardInterrupt:
pass
unicornhathd.off()
<commit_msg>Make the 4 lights bounce backwards and forwards along each edge.<commit_after> | #!/usr/bin/env python
import unicornhathd
import os
from time import sleep
class Point:
def __init__(self, x, y, dx, dy):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
def turn_on(self):
unicornhathd.set_pixel(self.x, self.y, 255, 255, 255)
def turn_off(self):
unicornhathd.set_pixel(self.x, self.y, 0, 0, 0)
def move(self):
self.turn_off()
self.x, self.dx = self.move_one_axis(self.x, self.dx)
self.y, self.dy = self.move_one_axis(self.y, self.dy)
self.turn_on()
def move_one_axis(self, x_or_y, dx_or_dy):
x_or_y += dx_or_dy
if x_or_y < 0 or x_or_y > 15:
dx_or_dy = dx_or_dy * -1
x_or_y += dx_or_dy
return x_or_y, dx_or_dy
print("Press <CTRL+C> to exit...")
unicornhathd.off()
# Bounce backwards and forwards along each edge:
p1 = Point(0, 0, 0, 1)
p2 = Point(0, 15, 1, 0)
p3 = Point(15, 0, -1, 0)
p4 = Point(15, 15, 0, -1)
p1.turn_on()
p2.turn_on()
p3.turn_on()
p4.turn_on()
try:
while True:
p1.move()
p2.move()
p3.move()
p4.move()
unicornhathd.show()
sleep(0.1)
except KeyboardInterrupt:
pass
unicornhathd.off()
| #!/usr/bin/env python
import unicornhathd
import os
try:
unicornhathd.set_pixel(0, 0, 255, 255, 255)
unicornhathd.set_pixel(15, 0, 255, 255, 255)
unicornhathd.set_pixel(0, 15, 255, 255, 255)
unicornhathd.set_pixel(15, 15, 255, 255, 255)
unicornhathd.show()
raw_input("Press the <ENTER> key or <CTRL+C> to exit...")
except KeyboardInterrupt:
pass
unicornhathd.off()
Make the 4 lights bounce backwards and forwards along each edge.#!/usr/bin/env python
import unicornhathd
import os
from time import sleep
class Point:
def __init__(self, x, y, dx, dy):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
def turn_on(self):
unicornhathd.set_pixel(self.x, self.y, 255, 255, 255)
def turn_off(self):
unicornhathd.set_pixel(self.x, self.y, 0, 0, 0)
def move(self):
self.turn_off()
self.x, self.dx = self.move_one_axis(self.x, self.dx)
self.y, self.dy = self.move_one_axis(self.y, self.dy)
self.turn_on()
def move_one_axis(self, x_or_y, dx_or_dy):
x_or_y += dx_or_dy
if x_or_y < 0 or x_or_y > 15:
dx_or_dy = dx_or_dy * -1
x_or_y += dx_or_dy
return x_or_y, dx_or_dy
print("Press <CTRL+C> to exit...")
unicornhathd.off()
# Bounce backwards and forwards along each edge:
p1 = Point(0, 0, 0, 1)
p2 = Point(0, 15, 1, 0)
p3 = Point(15, 0, -1, 0)
p4 = Point(15, 15, 0, -1)
p1.turn_on()
p2.turn_on()
p3.turn_on()
p4.turn_on()
try:
while True:
p1.move()
p2.move()
p3.move()
p4.move()
unicornhathd.show()
sleep(0.1)
except KeyboardInterrupt:
pass
unicornhathd.off()
| <commit_before>#!/usr/bin/env python
import unicornhathd
import os
try:
unicornhathd.set_pixel(0, 0, 255, 255, 255)
unicornhathd.set_pixel(15, 0, 255, 255, 255)
unicornhathd.set_pixel(0, 15, 255, 255, 255)
unicornhathd.set_pixel(15, 15, 255, 255, 255)
unicornhathd.show()
raw_input("Press the <ENTER> key or <CTRL+C> to exit...")
except KeyboardInterrupt:
pass
unicornhathd.off()
<commit_msg>Make the 4 lights bounce backwards and forwards along each edge.<commit_after>#!/usr/bin/env python
import unicornhathd
import os
from time import sleep
class Point:
def __init__(self, x, y, dx, dy):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
def turn_on(self):
unicornhathd.set_pixel(self.x, self.y, 255, 255, 255)
def turn_off(self):
unicornhathd.set_pixel(self.x, self.y, 0, 0, 0)
def move(self):
self.turn_off()
self.x, self.dx = self.move_one_axis(self.x, self.dx)
self.y, self.dy = self.move_one_axis(self.y, self.dy)
self.turn_on()
def move_one_axis(self, x_or_y, dx_or_dy):
x_or_y += dx_or_dy
if x_or_y < 0 or x_or_y > 15:
dx_or_dy = dx_or_dy * -1
x_or_y += dx_or_dy
return x_or_y, dx_or_dy
print("Press <CTRL+C> to exit...")
unicornhathd.off()
# Bounce backwards and forwards along each edge:
p1 = Point(0, 0, 0, 1)
p2 = Point(0, 15, 1, 0)
p3 = Point(15, 0, -1, 0)
p4 = Point(15, 15, 0, -1)
p1.turn_on()
p2.turn_on()
p3.turn_on()
p4.turn_on()
try:
while True:
p1.move()
p2.move()
p3.move()
p4.move()
unicornhathd.show()
sleep(0.1)
except KeyboardInterrupt:
pass
unicornhathd.off()
|
4aada26d0de09836f3b67b0fce136805cf11fa37 | thinc/extra/load_nlp.py | thinc/extra/load_nlp.py | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| Fix divide by zero error in vectors loading | Fix divide by zero error in vectors loading
| Python | mit | explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Fix divide by zero error in vectors loading | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| <commit_before>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Fix divide by zero error in vectors loading<commit_after> | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Fix divide by zero error in vectors loadingimport numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| <commit_before>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Fix divide by zero error in vectors loading<commit_after>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
    """Return the cached spaCy pipeline for *lang*, loading it on first use.

    Extra keyword arguments are forwarded to ``spacy.load`` only when the
    model is not already cached.
    """
    global SPACY_MODELS
    import spacy
    try:
        return SPACY_MODELS[lang]
    except KeyError:
        model = spacy.load(lang, **kwargs)
        SPACY_MODELS[lang] = model
        return model
def get_vectors(ops, lang):
    """Return a cached word-vector table for *lang* on *ops*'s device.

    The table is a ``(n_rows, vector_length)`` float32 array indexed by
    lexeme rank; each row holding a vector is scaled by its norm, with a
    small epsilon so zero-norm vectors do not divide by zero. Results
    are cached per ``(device, lang)`` pair, so the vocab is only walked
    on the first call for a given key.
    """
    global VECTORS
    key = (ops.device, lang)
    if key not in VECTORS:
        nlp = get_spacy(lang)
        # default=-1 yields a 0-row table instead of raising ValueError
        # when the vocab is empty.
        nV = max((lex.rank for lex in nlp.vocab), default=-1) + 1
        nM = nlp.vocab.vectors_length
        vectors = numpy.zeros((nV, nM), dtype='float32')
        for lex in nlp.vocab:
            if lex.has_vector:
                # Epsilon guards against division by zero for zero vectors.
                vectors[lex.rank] = lex.vector / (lex.vector_norm + 1e-8)
        VECTORS[key] = ops.asarray(vectors)
    return VECTORS[key]
|
59c4dd56e427e29eb26e81512c3066fe3f8b13b8 | tools/gdb/gdb_chrome.py | tools/gdb/gdb_chrome.py | #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
def lookup_function(val):
typ = str(val.type)
if typ == 'string16':
return String16Printer(val)
elif typ == 'GURL':
return GURLPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
| #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
class FilePathPrinter(object):
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['path_']['_M_dataplus']['_M_p']
def lookup_function(val):
type_to_printer = {
'string16': String16Printer,
'GURL': GURLPrinter,
'FilePath': FilePathPrinter,
}
printer = type_to_printer.get(str(val.type), None)
if printer:
return printer(val)
return None
gdb.pretty_printers.append(lookup_function)
| Add FilePath to the gdb pretty printers. | Add FilePath to the gdb pretty printers.
Review URL: http://codereview.chromium.org/6621017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | zcbenz/cefode-chromium,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,rogerwang/chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,nacl-webkit/chrome_deps,Jonekee/chromium.src,nacl-webkit/chrome_deps,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,Just-D/chromium-1,rogerwang/chromium,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,rogerwang/chromium,ChromiumWebApps/chromium,keishi/chromium,hujiajie/pa-chromium,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,keishi/chromium,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,patrickm/chromium.src,dednal/chromium.src,Jonekee/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,dushu1203/chromium.src,ondra-novak/chromium.src,robclark/chromium,robclark/chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,M4sse/chromium.src,dushu12
03/chromium.src,patrickm/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,M4sse/chromium.src,robclark/chromium,jaruba/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,robclark/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,M4sse/chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,rogerwang/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,robclark/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,keishi/chromium,hgl888/chromium-crosswalk-efl,ltilve/chromium,zcbenz/cefode-chromium,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,dednal/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,robclark/chromium,anirudhSK/chromium,pozdn
yakov/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,keishi/chromium,robclark/chromium,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,anirudhSK/chromium,dednal/chromium.src,mogoweb/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,nacl-webkit/chrome_deps,M4sse/chromium.src,ltilve/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,Just-D/chromium-1,anirudhSK/chromium,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,zcbenz/cefode-chromium,dednal/chromium.src,nacl-webkit/chrome_deps,robclark/chromium,robclark/chromium,mogoweb/chromium-crosswalk,keishi/chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,ltilve/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,littlstar/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,littlstar/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,ChromiumWebApps/chromium,Jonekee/chromium.src,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,pozdnyakov/chro
mium-crosswalk,markYoungH/chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,keishi/chromium,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,timopulkkinen/BubbleFish,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,rogerwang/chromium,ltilve/chromium,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,keishi/chromium,rogerwang/chromium,ltilve/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,dushu1203/chromium.src,keishi/chromium,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,dednal/chromium.src,hujiajie/pa-chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,zcbenz/cefode-chromium,Chilledheart/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,rogerwang/chromium,M4sse/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,keishi/chromium,fujunwei/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chro
mium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,axinging/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,markYoungH/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Just-D/chromium-1,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,robclark/chromium,keishi/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,Jonekee/chromium.src,M4sse/chromium.src,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium | #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
def lookup_function(val):
typ = str(val.type)
if typ == 'string16':
return String16Printer(val)
elif typ == 'GURL':
return GURLPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
Add FilePath to the gdb pretty printers.
Review URL: http://codereview.chromium.org/6621017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98 | #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
class FilePathPrinter(object):
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['path_']['_M_dataplus']['_M_p']
def lookup_function(val):
type_to_printer = {
'string16': String16Printer,
'GURL': GURLPrinter,
'FilePath': FilePathPrinter,
}
printer = type_to_printer.get(str(val.type), None)
if printer:
return printer(val)
return None
gdb.pretty_printers.append(lookup_function)
| <commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
def lookup_function(val):
typ = str(val.type)
if typ == 'string16':
return String16Printer(val)
elif typ == 'GURL':
return GURLPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
<commit_msg>Add FilePath to the gdb pretty printers.
Review URL: http://codereview.chromium.org/6621017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
class FilePathPrinter(object):
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['path_']['_M_dataplus']['_M_p']
def lookup_function(val):
type_to_printer = {
'string16': String16Printer,
'GURL': GURLPrinter,
'FilePath': FilePathPrinter,
}
printer = type_to_printer.get(str(val.type), None)
if printer:
return printer(val)
return None
gdb.pretty_printers.append(lookup_function)
| #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
def lookup_function(val):
typ = str(val.type)
if typ == 'string16':
return String16Printer(val)
elif typ == 'GURL':
return GURLPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
Add FilePath to the gdb pretty printers.
Review URL: http://codereview.chromium.org/6621017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
class FilePathPrinter(object):
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['path_']['_M_dataplus']['_M_p']
def lookup_function(val):
type_to_printer = {
'string16': String16Printer,
'GURL': GURLPrinter,
'FilePath': FilePathPrinter,
}
printer = type_to_printer.get(str(val.type), None)
if printer:
return printer(val)
return None
gdb.pretty_printers.append(lookup_function)
| <commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
def to_string(self):
return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
def to_string(self):
return self.val['spec_']
def lookup_function(val):
typ = str(val.type)
if typ == 'string16':
return String16Printer(val)
elif typ == 'GURL':
return GURLPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
<commit_msg>Add FilePath to the gdb pretty printers.
Review URL: http://codereview.chromium.org/6621017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GDB support for Chrome types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import gdb_chrome
This module relies on the WebKit gdb module already existing in
your Python path.
"""
import gdb
import webkit
class String16Printer(webkit.StringPrinter):
    """Pretty-printer for Chrome's string16 type.

    Delegates the UTF-16 conversion to the WebKit gdb module's helper.
    """

    def to_string(self):
        # _M_dataplus._M_p is libstdc++ basic_string's internal pointer
        # to the raw character data.
        return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
class GURLPrinter(webkit.StringPrinter):
    """Pretty-printer for GURL: displays the URL's canonical spec_ member."""

    def to_string(self):
        return self.val['spec_']
class FilePathPrinter(object):
    """Pretty-printer for FilePath: displays the wrapped path string."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        # FilePath keeps its value in path_; _M_dataplus._M_p is
        # libstdc++ basic_string's internal character-data pointer.
        inner_string = self.val['path_']
        return inner_string['_M_dataplus']['_M_p']
def lookup_function(val):
    """Return a pretty-printer instance for *val*, or None if unsupported."""
    printers = {
        'string16': String16Printer,
        'GURL': GURLPrinter,
        'FilePath': FilePathPrinter,
    }
    printer_class = printers.get(str(val.type))
    return printer_class(val) if printer_class is not None else None
gdb.pretty_printers.append(lookup_function)
|
97401a56e59d06acdd455f111dbe993265f2a39d | setup.py | setup.py | from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
| from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
| Remove urbansimd from excluded packages. | Remove urbansimd from excluded packages.
| Python | bsd-3-clause | SANDAG/urbansim,apdjustino/urbansim,synthicity/urbansim,UDST/urbansim,UDST/urbansim,bricegnichols/urbansim,AZMAG/urbansim,ual/urbansim,UDST/urbansim,VladimirTyrin/urbansim,waddell/urbansim,VladimirTyrin/urbansim,bricegnichols/urbansim,SANDAG/urbansim,waddell/urbansim,waddell/urbansim,apdjustino/urbansim,apdjustino/urbansim,SANDAG/urbansim,synthicity/urbansim,ual/urbansim,VladimirTyrin/urbansim,synthicity/urbansim,apdjustino/urbansim,AZMAG/urbansim,ual/urbansim,waddell/urbansim,bricegnichols/urbansim,bricegnichols/urbansim,ual/urbansim,VladimirTyrin/urbansim,synthicity/urbansim,AZMAG/urbansim,UDST/urbansim,AZMAG/urbansim,SANDAG/urbansim | from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
Remove urbansimd from excluded packages. | from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
| <commit_before>from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
<commit_msg>Remove urbansimd from excluded packages.<commit_after> | from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
| from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
Remove urbansimd from excluded packages.from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
| <commit_before>from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='ffoti@berkeley.edu',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
<commit_msg>Remove urbansimd from excluded packages.<commit_after>from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# Distribution metadata for the urbansim package; see the setuptools
# documentation for the setup() keyword reference.
setup(
    name='urbansim',
    version='0.2dev',
    description='Tool for modeling metropolitan real estate markets',
    author='Synthicity',
    author_email='ffoti@berkeley.edu',
    license='AGPL',
    url='https://github.com/synthicity/urbansim',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python :: 2.7',
        'License :: OSI Approved :: GNU Affero General Public License v3'
    ],
    # Ship every package found in the tree except test packages.
    packages=find_packages(exclude=['*.tests']),
    # Non-code template files must be listed explicitly to be installed.
    package_data={'urbansim.urbansim': ['templates/*.template']},
    install_requires=[
        'Django>=1.6.2',
        'jinja2>=2.7.2',
        'numpy>=1.8.0',
        'pandas>=0.13.1',
        'patsy>=0.2.1',
        'pyyaml>=3.10',
        'scipy>=0.13.3',
        'shapely>=1.3.0',
        'simplejson>=3.3.3',
        'statsmodels>=0.5.0',
        'tables>=3.1.0'
    ],
    # Command-line tools installed as console scripts.
    entry_points={
        'console_scripts': [
            'urbansim_compile = urbansim.urbansim.compilecli:main',
            'urbansim_serve = urbansim.server.servecli:main'
        ]
    }
)
|
267b5392adaf3e3f93a22f95e5f1f161225a7f3a | setup.py | setup.py | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
],
)
| Add classifier for Python 3.6 | Add classifier for Python 3.6
| Python | mit | zsiciarz/django-pgallery,zsiciarz/django-pgallery | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
Add classifier for Python 3.6 | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
<commit_msg>Add classifier for Python 3.6<commit_after> | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
Add classifier for Python 3.6import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
<commit_msg>Add classifier for Python 3.6<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='https://github.com/zsiciarz/django-pgallery',
download_url='https://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.9',
'Pillow',
'psycopg2>=2.5',
'django-markitup>=2.0',
'django-model-utils>=2.0',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
],
)
|
ad5cb91fa011e067a96835e59e05581af3ea3a53 | acctwatch/configcheck.py | acctwatch/configcheck.py | import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
geoipdb = None
def main():
config = Configuration()
if config.WITH_GEOIP and not geoipdb:
print ("GeoIP is enabled, but unable to import module, please check installation. Disabling.")
config.WITH_GEOIP = False
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid.")
if __name__ == '__main__':
main()
| import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
print ("GeoIP is missing, please install dependency")
def main():
config = Configuration()
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
if __name__ == '__main__':
main()
| Clean up configuration check utility | Clean up configuration check utility
| Python | isc | GuardedRisk/Google-Apps-Auditing | import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
geoipdb = None
def main():
config = Configuration()
if config.WITH_GEOIP and not geoipdb:
print ("GeoIP is enabled, but unable to import module, please check installation. Disabling.")
config.WITH_GEOIP = False
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid.")
if __name__ == '__main__':
main()
Clean up configuration check utility | import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
print ("GeoIP is missing, please install dependency")
def main():
config = Configuration()
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
if __name__ == '__main__':
main()
| <commit_before>import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
geoipdb = None
def main():
config = Configuration()
if config.WITH_GEOIP and not geoipdb:
print ("GeoIP is enabled, but unable to import module, please check installation. Disabling.")
config.WITH_GEOIP = False
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid.")
if __name__ == '__main__':
main()
<commit_msg>Clean up configuration check utility<commit_after> | import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
print ("GeoIP is missing, please install dependency")
def main():
config = Configuration()
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
if __name__ == '__main__':
main()
| import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
geoipdb = None
def main():
config = Configuration()
if config.WITH_GEOIP and not geoipdb:
print ("GeoIP is enabled, but unable to import module, please check installation. Disabling.")
config.WITH_GEOIP = False
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid.")
if __name__ == '__main__':
main()
Clean up configuration check utilityimport httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
print ("GeoIP is missing, please install dependency")
def main():
config = Configuration()
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
if __name__ == '__main__':
main()
| <commit_before>import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
geoipdb = None
def main():
config = Configuration()
if config.WITH_GEOIP and not geoipdb:
print ("GeoIP is enabled, but unable to import module, please check installation. Disabling.")
config.WITH_GEOIP = False
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid.")
if __name__ == '__main__':
main()
<commit_msg>Clean up configuration check utility<commit_after>import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
from config import Configuration
try:
import geoip2.database as geoipdb
except ImportError:
print ("GeoIP is missing, please install dependency")
def main():
config = Configuration()
credentials = config.get_credentials()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Admin Reports API.
service = discovery.build('admin', 'reports_v1', http=http)
activities = service.activities()
try:
login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
print("Success!")
except client.AccessTokenRefreshError:
print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
if __name__ == '__main__':
main()
|
fae3f6aaba91167c5da2f3d5d9b6b1a66068f9f7 | setup.py | setup.py | from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| Test edit - to check svn email hook | Test edit - to check svn email hook | Python | bsd-3-clause | yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD | from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
Test edit - to check svn email hook | import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| <commit_before>from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
<commit_msg>Test edit - to check svn email hook<commit_after> | import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
Test edit - to check svn email hookimport os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| <commit_before>from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
<commit_msg>Test edit - to check svn email hook<commit_after>import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
|
50efc8ccccecc3f48fcf0e82ebc3390da48e0967 | setup.py | setup.py | from setuptools import setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=[
"auslib",
"auslib.admin",
"auslib.admin.views",
"auslib.blobs",
"auslib.migrate",
"auslib.migrate.versions",
"auslib.util",
"auslib.web",
"auslib.web.views",
],
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
| from setuptools import find_packages, setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=find_packages(exclude=["vendor"]),
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
| Use find_packages instead of an explicit list of packages for easier maintenance. | Use find_packages instead of an explicit list of packages for easier maintenance.
| Python | mpl-2.0 | nurav/balrog,testbhearsum/balrog,nurav/balrog,testbhearsum/balrog,mozbhearsum/balrog,aksareen/balrog,nurav/balrog,testbhearsum/balrog,nurav/balrog,mozbhearsum/balrog,tieu/balrog,mozbhearsum/balrog,tieu/balrog,aksareen/balrog,aksareen/balrog,tieu/balrog,mozbhearsum/balrog,tieu/balrog,aksareen/balrog,testbhearsum/balrog | from setuptools import setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=[
"auslib",
"auslib.admin",
"auslib.admin.views",
"auslib.blobs",
"auslib.migrate",
"auslib.migrate.versions",
"auslib.util",
"auslib.web",
"auslib.web.views",
],
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
Use find_packages instead of an explicit list of packages for easier maintenance. | from setuptools import find_packages, setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=find_packages(exclude=["vendor"]),
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
| <commit_before>from setuptools import setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=[
"auslib",
"auslib.admin",
"auslib.admin.views",
"auslib.blobs",
"auslib.migrate",
"auslib.migrate.versions",
"auslib.util",
"auslib.web",
"auslib.web.views",
],
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
<commit_msg>Use find_packages instead of an explicit list of packages for easier maintenance.<commit_after> | from setuptools import find_packages, setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=find_packages(exclude=["vendor"]),
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
| from setuptools import setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=[
"auslib",
"auslib.admin",
"auslib.admin.views",
"auslib.blobs",
"auslib.migrate",
"auslib.migrate.versions",
"auslib.util",
"auslib.web",
"auslib.web.views",
],
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
Use find_packages instead of an explicit list of packages for easier maintenance.from setuptools import find_packages, setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=find_packages(exclude=["vendor"]),
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
| <commit_before>from setuptools import setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=[
"auslib",
"auslib.admin",
"auslib.admin.views",
"auslib.blobs",
"auslib.migrate",
"auslib.migrate.versions",
"auslib.util",
"auslib.web",
"auslib.web.views",
],
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
<commit_msg>Use find_packages instead of an explicit list of packages for easier maintenance.<commit_after>from setuptools import find_packages, setup
setup(
name="balrog",
version="1.0",
description="Mozilla's Update Server",
author="Ben Hearsum",
author_email="ben@hearsum.ca",
packages=find_packages(exclude=["vendor"]),
include_package_data=True,
install_requires=[
"flask==0.10.1",
"Werkzeug==0.9.6",
"wtforms==2.0.1",
"flask-wtf==0.10.2",
"sqlalchemy-migrate==0.7.2",
"tempita==0.5.1",
"decorator==3.3.3",
"blinker==1.2",
"cef==0.5",
"flask-compress==1.0.2",
"itsdangerous==0.24",
"repoze.lru==0.6",
],
url="https://github.com/mozilla/balrog",
)
|
2a7877c1ed3e1dc9a5bcc27220847a5a75cf65ab | spiff/membership/management/commands/bill_members.py | spiff/membership/management/commands/bill_members.py | from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
endOfMonth, startOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
| from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
startOfMonth, endOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
| Fix swapped dates on invoices | Fix swapped dates on invoices
| Python | agpl-3.0 | SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff | from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
endOfMonth, startOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
Fix swapped dates on invoices | from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
startOfMonth, endOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
| <commit_before>from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
endOfMonth, startOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
<commit_msg>Fix swapped dates on invoices<commit_after> | from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
startOfMonth, endOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
| from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
endOfMonth, startOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
Fix swapped dates on invoicesfrom django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
startOfMonth, endOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
| <commit_before>from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
endOfMonth, startOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
<commit_msg>Fix swapped dates on invoices<commit_after>from django.core.management import BaseCommand
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
from spiff.payment.models import Invoice
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
for member in Member.objects.all():
if not member.billedForMonth():
if member.highestRank is not None and member.highestRank.monthlyDues > 0:
print "Billing", member, "for the month"
startOfMonth, endOfMonth = monthRange()
invoice = Invoice.objects.create(
user=member.user,
dueDate=endOfMonth,
)
for group in member.user.groups.all():
if group.rank.monthlyDues > 0:
lineItem = RankLineItem.objects.create(
rank = group.rank,
member = member,
activeFromDate=startOfMonth,
activeToDate=endOfMonth,
invoice=invoice
)
print "\tCreated", lineItem
invoice.draft = False
invoice.open = True
invoice.save()
print "\tInvoice saved!"
else:
print "%s has outstanding balance of $%s"%(
member,
member.outstandingBalance
)
|
e3409c94b64deac85deada28f57a30ae08d0083d | api/caching/listeners.py | api/caching/listeners.py | from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
| from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
| Fix passing arguments to ban_url | Fix passing arguments to ban_url
h/t @cwisecarver
| Python | apache-2.0 | chrisseto/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,alexschiller/osf.io,mluke93/osf.io,rdhyee/osf.io,zachjanicki/osf.io,wearpants/osf.io,leb2dg/osf.io,caneruguz/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,mattclark/osf.io,aaxelb/osf.io,felliott/osf.io,caseyrollins/osf.io,mluke93/osf.io,erinspace/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,mluo613/osf.io,binoculars/osf.io,mluo613/osf.io,cslzchen/osf.io,adlius/osf.io,cwisecarver/osf.io,alexschiller/osf.io,chrisseto/osf.io,mattclark/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,acshi/osf.io,TomHeatwole/osf.io,abought/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,rdhyee/osf.io,Nesiehr/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,rdhyee/osf.io,SSJohns/osf.io,RomanZWang/osf.io,emetsger/osf.io,cslzchen/osf.io,crcresearch/osf.io,jnayak1/osf.io,cslzchen/osf.io,aaxelb/osf.io,mfraezz/osf.io,jnayak1/osf.io,icereval/osf.io,Nesiehr/osf.io,abought/osf.io,abought/osf.io,mluo613/osf.io,alexschiller/osf.io,SSJohns/osf.io,zamattiac/osf.io,sloria/osf.io,binoculars/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,doublebits/osf.io,emetsger/osf.io,SSJohns/osf.io,zachjanicki/osf.io,adlius/osf.io,zamattiac/osf.io,binoculars/osf.io,amyshi188/osf.io,baylee-d/osf.io,sloria/osf.io,emetsger/osf.io,baylee-d/osf.io,TomBaxter/osf.io,laurenrevere/osf.io,aaxelb/osf.io,mfraezz/osf.io,cwisecarver/osf.io,rdhyee/osf.io,jnayak1/osf.io,felliott/osf.io,kch8qx/osf.io,mluke93/osf.io,mluo613/osf.io,felliott/osf.io,samchrisinger/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,acshi/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,pattisdr/osf.io,icereval/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,icereval/osf.io,caneruguz/osf.io,TomBaxter/osf.io,erinspace/osf.io,hmoco/osf.io,Nesiehr/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,caseyrollin
s/osf.io,zamattiac/osf.io,hmoco/osf.io,amyshi188/osf.io,kwierman/osf.io,mluke93/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,hmoco/osf.io,doublebits/osf.io,doublebits/osf.io,caneruguz/osf.io,acshi/osf.io,crcresearch/osf.io,erinspace/osf.io,mluo613/osf.io,kwierman/osf.io,Nesiehr/osf.io,doublebits/osf.io,RomanZWang/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,chrisseto/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,TomBaxter/osf.io,chennan47/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,hmoco/osf.io,acshi/osf.io,kwierman/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,abought/osf.io,sloria/osf.io,asanfilippo7/osf.io,chennan47/osf.io,kch8qx/osf.io,zachjanicki/osf.io,adlius/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,emetsger/osf.io,chennan47/osf.io,amyshi188/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,pattisdr/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,doublebits/osf.io,kch8qx/osf.io,wearpants/osf.io,laurenrevere/osf.io,mattclark/osf.io,acshi/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,felliott/osf.io,HalcyonChimera/osf.io,wearpants/osf.io | from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
Fix passing arguments to ban_url
h/t @cwisecarver | from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
| <commit_before>from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
<commit_msg>Fix passing arguments to ban_url
h/t @cwisecarver<commit_after> | from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
| from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
Fix passing arguments to ban_url
h/t @cwisecarverfrom functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
| <commit_before>from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
<commit_msg>Fix passing arguments to ban_url
h/t @cwisecarver<commit_after>from functools import partial
from api.caching.tasks import ban_url
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
|
dae054de92d6d864f77a337d269ba9b0c5ddeec4 | examples/charts/file/stacked_bar.py | examples/charts/file/stacked_bar.py | from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts._attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
| from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
| Fix import on stacked bar example. | Fix import on stacked bar example.
| Python | bsd-3-clause | percyfal/bokeh,maxalbert/bokeh,phobson/bokeh,ChinaQuants/bokeh,quasiben/bokeh,DuCorey/bokeh,schoolie/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,Karel-van-de-Plassche/bokeh,rs2/bokeh,DuCorey/bokeh,aavanian/bokeh,gpfreitas/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,dennisobrien/bokeh,maxalbert/bokeh,ericmjl/bokeh,jakirkham/bokeh,htygithub/bokeh,mindriot101/bokeh,phobson/bokeh,schoolie/bokeh,ericmjl/bokeh,timsnyder/bokeh,phobson/bokeh,azjps/bokeh,philippjfr/bokeh,ChinaQuants/bokeh,ericmjl/bokeh,schoolie/bokeh,stonebig/bokeh,gpfreitas/bokeh,clairetang6/bokeh,justacec/bokeh,philippjfr/bokeh,azjps/bokeh,ptitjano/bokeh,timsnyder/bokeh,msarahan/bokeh,bokeh/bokeh,jakirkham/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,ericmjl/bokeh,msarahan/bokeh,schoolie/bokeh,dennisobrien/bokeh,DuCorey/bokeh,philippjfr/bokeh,DuCorey/bokeh,schoolie/bokeh,mindriot101/bokeh,jakirkham/bokeh,DuCorey/bokeh,bokeh/bokeh,justacec/bokeh,philippjfr/bokeh,dennisobrien/bokeh,justacec/bokeh,ericmjl/bokeh,stonebig/bokeh,stonebig/bokeh,maxalbert/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,justacec/bokeh,ptitjano/bokeh,aiguofer/bokeh,mindriot101/bokeh,bokeh/bokeh,bokeh/bokeh,rs2/bokeh,mindriot101/bokeh,rs2/bokeh,htygithub/bokeh,KasperPRasmussen/bokeh,clairetang6/bokeh,azjps/bokeh,aiguofer/bokeh,ChinaQuants/bokeh,clairetang6/bokeh,quasiben/bokeh,aavanian/bokeh,rs2/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,quasiben/bokeh,maxalbert/bokeh,azjps/bokeh,aiguofer/bokeh,philippjfr/bokeh,draperjames/bokeh,timsnyder/bokeh,ptitjano/bokeh,gpfreitas/bokeh,gpfreitas/bokeh,aavanian/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,dennisobrien/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,dennisobrien/bokeh,percyfal/bokeh,htygithub/bokeh,draperjames/bokeh,percyfal/bokeh,azjps/bokeh,clairetang6/bokeh,jakirkham/bokeh,percyfal/bokeh,ptitjano/bokeh,htygithub/bokeh,msarahan/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,phobs
on/bokeh,timsnyder/bokeh,aiguofer/bokeh,jakirkham/bokeh,percyfal/bokeh,draperjames/bokeh,rs2/bokeh | from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts._attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
Fix import on stacked bar example. | from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
| <commit_before>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts._attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
<commit_msg>Fix import on stacked bar example.<commit_after> | from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
| from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts._attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
Fix import on stacked bar example.from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
| <commit_before>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts._attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
<commit_msg>Fix import on stacked bar example.<commit_after>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
df = df_from_json(data)
# filter by countries with at least one medal and sort
df = df[df['medals.total'] > 0]
df = df.sort("medals.total", ascending=False)
df = df.rename(columns={'medals.gold': 'gold', 'medals.silver': 'silver',
'medals.bronze': 'bronze', 'medals.total': 'total'})
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Stacked bars")
output_file("stacked_bar.html")
show(bar)
|
d3e2a11f72f6de811f39ac10aa0abde74b99d269 | hcibench/pipeline/__init__.py | hcibench/pipeline/__init__.py | """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
| """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
| Make Estimator importable from pipeline. | Make Estimator importable from pipeline.
| Python | mit | ucdrascal/axopy,ucdrascal/hcibench | """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
Make Estimator importable from pipeline. | """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
| <commit_before>"""
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
<commit_msg>Make Estimator importable from pipeline.<commit_after> | """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
| """
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
Make Estimator importable from pipeline."""
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
| <commit_before>"""
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
<commit_msg>Make Estimator importable from pipeline.<commit_after>"""
The :mod:`hcibench.pipeline` module provides a flexible infrastructure for
data processing and implements some common types of processing blocks.
"""
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
dc0f82fb424bce899493be9ba483a1fe16ea4f64 | setup.py | setup.py |
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
|
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel',
'bumpversion'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
| Add bumpversion as a dev requirement. | Add bumpversion as a dev requirement.
| Python | mit | TkTech/Jawa,TkTech/Jawa |
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
Add bumpversion as a dev requirement. |
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel',
'bumpversion'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
| <commit_before>
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
<commit_msg>Add bumpversion as a dev requirement.<commit_after> |
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel',
'bumpversion'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
|
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
Add bumpversion as a dev requirement.
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel',
'bumpversion'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
| <commit_before>
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
<commit_msg>Add bumpversion as a dev requirement.<commit_after>
from setuptools import setup, find_packages
setup(
name='jawa',
packages=find_packages(),
version='2.1.1',
python_requires='>=3.6',
description='Doing fun stuff with JVM ClassFiles.',
long_description=open('README.md', 'r').read(),
long_description_content_type='text/markdown',
author='Tyler Kennedy',
author_email='tk@tkte.ch',
url='http://github.com/TkTech/Jawa',
keywords=[
'java',
'disassembly',
'disassembler',
'assembly'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Disassemblers',
'Topic :: Software Development :: Assemblers'
],
install_requires=[
'click>=5.0'
],
tests_require=[
'pytest>=2.10',
],
extras_require={
'dev': [
'pytest',
'sphinx',
'sphinxcontrib-googleanalytics',
'sphinx_rtd_theme',
'sphinx-click',
'ghp-import',
'pyyaml',
'ipython',
'twine',
'wheel',
'bumpversion'
]
},
entry_points='''
[console_scripts]
jawa=jawa.cli:cli
'''
)
|
e91b691ba2e9a83d8cc94f42bdc41c9a7350c790 | setup.py | setup.py | #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=["License :: OSI Approved :: MIT License"],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
| #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
| Add trove classifiers specifying Python 3 support. | Add trove classifiers specifying Python 3 support.
| Python | mit | testing-cabal/extras | #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=["License :: OSI Approved :: MIT License"],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
Add trove classifiers specifying Python 3 support. | #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
| <commit_before>#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=["License :: OSI Approved :: MIT License"],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
<commit_msg>Add trove classifiers specifying Python 3 support.<commit_after> | #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
| #!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=["License :: OSI Approved :: MIT License"],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
Add trove classifiers specifying Python 3 support.#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
| <commit_before>#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=["License :: OSI Approved :: MIT License"],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
<commit_msg>Add trove classifiers specifying Python 3 support.<commit_after>#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
description=('Useful extra bits for Python - things that shold be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
|
0ed714d6982d8d4cec628a7549a0a348526f0cf2 | setup.py | setup.py | import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5.0',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| Add z Patch number as per symver.org | Add z Patch number as per symver.org
| Python | mit | jamescooke/factory_djoy | import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
Add z Patch number as per symver.org | import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5.0',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| <commit_before>import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
<commit_msg>Add z Patch number as per symver.org<commit_after> | import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5.0',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
Add z Patch number as per symver.orgimport os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5.0',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| <commit_before>import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
<commit_msg>Add z Patch number as per symver.org<commit_after>import os
import setuptools
# Declarative packaging metadata for the factory_djoy distribution.
setuptools.setup(
    name='factory_djoy',
    version='0.5.0',
    description='Factories for Django, creating valid instances every time',
    url='http://github.com/jamescooke/factory_djoy',
    author='James Cooke',
    author_email='github@jamescooke.info',
    license='MIT',
    # Ship every package in the tree except the test packages.
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        'Django>=1.8',
        'factory_boy>=2.7',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Framework :: Django',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
|
fdb7c400adb777cdc60cf034569d81e95797cc10 | infupy/backends/common.py | infupy/backends/common.py | import sys
from abc import ABCMeta, abstractmethod
def printerr(msg, e=''):
    """Write a backend-prefixed diagnostic message to stderr.

    ``msg`` may contain a ``{}`` placeholder that is filled with ``e``.
    """
    text = ("Backend: " + str(msg)).format(e)
    print(text, file=sys.stderr)
class CommandError(Exception):
    """Exception whose string form prefixes its args with "Command error:"."""

    def __str__(self):
        return "Command error: " + str(self.args)
class Syringe(metaclass=ABCMeta):
    """Abstract base class for syringe-pump backends."""
    # NOTE(review): class-level set shared by all instances.
    # registerEvent/unregisterEvent mutate it in place while clearEvents
    # rebinds an instance attribute -- confirm whether sharing is intended.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        """
        Send command and read reply.
        """
        pass
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        return 0
    @abstractmethod
    def readVolume(self):
        return 0
    # Infusion control
    def setRate(self, rate):
        # No-op by default; not abstract, so subclasses may skip it.
        pass
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
| import sys
from abc import ABCMeta, abstractmethod
def printerr(msg, e=''):
    """Write a backend-prefixed diagnostic message to stderr.

    ``msg`` may contain a ``{}`` placeholder that is filled with ``e``.
    """
    text = ("Backend: " + str(msg)).format(e)
    print(text, file=sys.stderr)
class CommandError(Exception):
    """Exception whose string form prefixes its args with "Command error:"."""

    def __str__(self):
        return "Command error: " + str(self.args)
class Syringe(metaclass=ABCMeta):
    """Abstract base class for syringe-pump backends."""
    # NOTE(review): class-level set shared by all instances until
    # clearEvents rebinds it per instance -- confirm sharing is intended.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        raise NotImplementedError
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        raise NotImplementedError
    @abstractmethod
    def readVolume(self):
        raise NotImplementedError
    # Infusion control
    def setRate(self, rate):
        # Not abstract, but raises unless a subclass overrides it.
        raise NotImplementedError
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
| Make abstract methods raise not implemented | Make abstract methods raise not implemented
| Python | isc | jaj42/infupy | import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        """
        Send command and read reply.
        """
        pass
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        return 0
    @abstractmethod
    def readVolume(self):
        return 0
    # Infusion control
    def setRate(self, rate):
        pass
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
Make abstract methods raise not implemented | import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        raise NotImplementedError
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        raise NotImplementedError
    @abstractmethod
    def readVolume(self):
        raise NotImplementedError
    # Infusion control
    def setRate(self, rate):
        raise NotImplementedError
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
| <commit_before>import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        """
        Send command and read reply.
        """
        pass
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        return 0
    @abstractmethod
    def readVolume(self):
        return 0
    # Infusion control
    def setRate(self, rate):
        pass
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
<commit_msg>Make abstract methods raise not implemented<commit_after> | import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        raise NotImplementedError
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        raise NotImplementedError
    @abstractmethod
    def readVolume(self):
        raise NotImplementedError
    # Infusion control
    def setRate(self, rate):
        raise NotImplementedError
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
| import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        """
        Send command and read reply.
        """
        pass
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        return 0
    @abstractmethod
    def readVolume(self):
        return 0
    # Infusion control
    def setRate(self, rate):
        pass
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
Make abstract methods raise not implementedimport sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        raise NotImplementedError
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        raise NotImplementedError
    @abstractmethod
    def readVolume(self):
        raise NotImplementedError
    # Infusion control
    def setRate(self, rate):
        raise NotImplementedError
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
| <commit_before>import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        """
        Send command and read reply.
        """
        pass
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        return 0
    @abstractmethod
    def readVolume(self):
        return 0
    # Infusion control
    def setRate(self, rate):
        pass
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
<commit_msg>Make abstract methods raise not implemented<commit_after>import sys
from abc import ABCMeta, abstractmethod
# Shared helpers for infusion-pump backends: a stderr logger, a command
# error type, and the abstract Syringe interface.
def printerr(msg, e=''):
    msg = "Backend: " + str(msg)
    print(msg.format(e), file=sys.stderr)
class CommandError(Exception):
    def __str__(self):
        return "Command error: {}".format(self.args)
class Syringe(metaclass=ABCMeta):
    # NOTE(review): class-level set shared by all instances -- confirm.
    _events = set()
    @abstractmethod
    def execCommand(self, msg):
        raise NotImplementedError
    # Read Perfusion related values
    @abstractmethod
    def readRate(self):
        raise NotImplementedError
    @abstractmethod
    def readVolume(self):
        raise NotImplementedError
    # Infusion control
    def setRate(self, rate):
        raise NotImplementedError
    # Events
    def registerEvent(self, event):
        self._events |= set([event])
    def unregisterEvent(self, event):
        self._events -= set([event])
    def clearEvents(self):
        self._events = set()
|
e635af49a1f72475980ff91406942707a369935d | setup.py | setup.py |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.2.dev1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) | Change version to dev version | Change version to dev version
| Python | apache-2.0 | j5int/py-exe-builder |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
)Change version to dev version |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.2.dev1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) | <commit_before>
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
)<commit_msg>Change version to dev version<commit_after> |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.2.dev1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) |
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
)Change version to dev version
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.2.dev1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) | <commit_before>
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
)<commit_msg>Change version to dev version<commit_after>
__author__ = 'matth'
from distutils.core import setup
from setuptools import find_packages
# Packaging metadata for py-exe-builder (py2exe stub generator).
setup(
    name='py-exe-builder',
    version='0.2.dev1',
    packages = find_packages(),
    license='Apache License, Version 2.0',
    description='Uses py2exe to create small exe stubs that leverage a full python installation, rather than packing the required pyc files in to the executable.',
    long_description=open('README.md').read(),  # NOTE(review): handle left open; path is cwd-relative
    url='http://www.j5int.com/',
    author='j5 International',
    author_email='support@j5int.com',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2 :: Only',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    install_requires = ["py2exe"],
) |
d93b1c1feadb3e8c2dd0643a19b025faec305a7a | setup.py | setup.py | """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
# Read the README once at import time; it doubles as the long description.
with open('README.rst') as f:
    README = f.read()
setup(
    name='py-perf',
    version='0.0.1',
    url='https://github.com/kevinconway/PyPerf',
    license="Apache2",
    description='A service for profiling Python snippets.',
    author='Kevin Conway',
    author_email='kevinjacobconway@gmail.com',
    long_description=README,
    classifiers=[],
    packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
    install_requires=[
        'psutil',
        'memory_profiler',
        'daemons',
        'six',
        'amqp',
        'stevedore',
    ],
    # Plugin registry: implementations are resolved through these entry
    # point groups at runtime (stevedore is among the dependencies).
    entry_points={
        "pyperf_messages": [
            "profile_request = pyperf.messages:ProfileRequest",
            "profile_response = pyperf.messages:ProfileResponse",
            "profile_failure = pyperf.messages:ProfileFailure",
        ],
        "pyperf_transports": [
            "amqp = pyperf.transports.amqp:AmqpTransport",
        ],
    },
    include_package_data=True,
    zip_safe=False,
)
| """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
# Read the README once at import time; it doubles as the long description.
with open('README.rst') as f:
    README = f.read()
setup(
    name='py-perf',
    version='0.0.1',
    url='https://github.com/kevinconway/PyPerf',
    license="Apache2",
    description='A service for profiling Python snippets.',
    author='Kevin Conway',
    author_email='kevinjacobconway@gmail.com',
    long_description=README,
    classifiers=[],
    packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
    install_requires=[
        'psutil',
        'memory_profiler',
        'daemons',
        'six',
        'amqp',
        'stevedore',
    ],
    # Plugin registry: implementations are resolved through these entry
    # point groups at runtime (stevedore is among the dependencies).
    entry_points={
        "pyperf_messages": [
            "profile_request = pyperf.messages:ProfileRequest",
            "profile_response = pyperf.messages:ProfileResponse",
            "profile_failure = pyperf.messages:ProfileFailure",
        ],
        "pyperf_transports": [
            "amqp = pyperf.transports.amqp:AmqpTransport",
        ],
        "pyperf_profilers": [
            "runtime = pyperf.profilers.runtime:RuntimeProfiler",
            "memory_max = pyperf.profilers.memory:MaxMemoryProfiler",
            "memory_min = pyperf.profilers.memory:MinMemoryProfiler",
            "memory_avg = pyperf.profilers.memory:AvgMemoryProfiler",
        ],
    },
    include_package_data=True,
    zip_safe=False,
)
| Add entry_point for profiler implementations | Add entry_point for profiler implementations
| Python | apache-2.0 | kevinconway/PyPerf,kevinconway/PyPerf,kevinconway/PyPerf | """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
# Read the README once at import time; it doubles as the long description.
with open('README.rst') as f:
    README = f.read()
setup(
    name='py-perf',
    version='0.0.1',
    url='https://github.com/kevinconway/PyPerf',
    license="Apache2",
    description='A service for profiling Python snippets.',
    author='Kevin Conway',
    author_email='kevinjacobconway@gmail.com',
    long_description=README,
    classifiers=[],
    packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
    install_requires=[
        'psutil',
        'memory_profiler',
        'daemons',
        'six',
        'amqp',
        'stevedore',
    ],
    # Plugin registry: implementations are resolved through these entry
    # point groups at runtime (stevedore is among the dependencies).
    entry_points={
        "pyperf_messages": [
            "profile_request = pyperf.messages:ProfileRequest",
            "profile_response = pyperf.messages:ProfileResponse",
            "profile_failure = pyperf.messages:ProfileFailure",
        ],
        "pyperf_transports": [
            "amqp = pyperf.transports.amqp:AmqpTransport",
        ],
    },
    include_package_data=True,
    zip_safe=False,
)
Add entry_point for profiler implementations | """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
# Read the README once at import time; it doubles as the long description.
with open('README.rst') as f:
    README = f.read()
setup(
    name='py-perf',
    version='0.0.1',
    url='https://github.com/kevinconway/PyPerf',
    license="Apache2",
    description='A service for profiling Python snippets.',
    author='Kevin Conway',
    author_email='kevinjacobconway@gmail.com',
    long_description=README,
    classifiers=[],
    packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
    install_requires=[
        'psutil',
        'memory_profiler',
        'daemons',
        'six',
        'amqp',
        'stevedore',
    ],
    # Plugin registry: implementations are resolved through these entry
    # point groups at runtime (stevedore is among the dependencies).
    entry_points={
        "pyperf_messages": [
            "profile_request = pyperf.messages:ProfileRequest",
            "profile_response = pyperf.messages:ProfileResponse",
            "profile_failure = pyperf.messages:ProfileFailure",
        ],
        "pyperf_transports": [
            "amqp = pyperf.transports.amqp:AmqpTransport",
        ],
        "pyperf_profilers": [
            "runtime = pyperf.profilers.runtime:RuntimeProfiler",
            "memory_max = pyperf.profilers.memory:MaxMemoryProfiler",
            "memory_min = pyperf.profilers.memory:MinMemoryProfiler",
            "memory_avg = pyperf.profilers.memory:AvgMemoryProfiler",
        ],
    },
    include_package_data=True,
    zip_safe=False,
)
| <commit_before>"""Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
# Read the README once at import time; it doubles as the long description.
with open('README.rst') as f:
    README = f.read()
setup(
    name='py-perf',
    version='0.0.1',
    url='https://github.com/kevinconway/PyPerf',
    license="Apache2",
    description='A service for profiling Python snippets.',
    author='Kevin Conway',
    author_email='kevinjacobconway@gmail.com',
    long_description=README,
    classifiers=[],
    packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
    install_requires=[
        'psutil',
        'memory_profiler',
        'daemons',
        'six',
        'amqp',
        'stevedore',
    ],
    # Plugin registry: implementations are resolved through these entry
    # point groups at runtime (stevedore is among the dependencies).
    entry_points={
        "pyperf_messages": [
            "profile_request = pyperf.messages:ProfileRequest",
            "profile_response = pyperf.messages:ProfileResponse",
            "profile_failure = pyperf.messages:ProfileFailure",
        ],
        "pyperf_transports": [
            "amqp = pyperf.transports.amqp:AmqpTransport",
        ],
    },
    include_package_data=True,
    zip_safe=False,
)
<commit_msg>Add entry_point for profiler implementations<commit_after> | """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
README = f.read()
setup(
name='py-perf',
version='0.0.1',
url='https://github.com/kevinconway/PyPerf',
license="Apache2",
description='A service for profiling Python snippets.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=README,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'psutil',
'memory_profiler',
'daemons',
'six',
'amqp',
'stevedore',
],
entry_points={
"pyperf_messages": [
"profile_request = pyperf.messages:ProfileRequest",
"profile_response = pyperf.messages:ProfileResponse",
"profile_failure = pyperf.messages:ProfileFailure",
],
"pyperf_transports": [
"amqp = pyperf.transports.amqp:AmqpTransport",
],
"pyperf_profilers": [
"runtime = pyperf.profilers.runtime:RuntimeProfiler",
"memory_max = pyperf.profilers.memory:MaxMemoryProfiler",
"memory_min = pyperf.profilers.memory:MinMemoryProfiler",
"memory_avg = pyperf.profilers.memory:AvgMemoryProfiler",
],
},
include_package_data=True,
zip_safe=False,
)
| """Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
README = f.read()
setup(
name='py-perf',
version='0.0.1',
url='https://github.com/kevinconway/PyPerf',
license="Apache2",
description='A service for profiling Python snippets.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=README,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'psutil',
'memory_profiler',
'daemons',
'six',
'amqp',
'stevedore',
],
entry_points={
"pyperf_messages": [
"profile_request = pyperf.messages:ProfileRequest",
"profile_response = pyperf.messages:ProfileResponse",
"profile_failure = pyperf.messages:ProfileFailure",
],
"pyperf_transports": [
"amqp = pyperf.transports.amqp:AmqpTransport",
],
},
include_package_data=True,
zip_safe=False,
)
Add entry_point for profiler implementations"""Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
README = f.read()
setup(
name='py-perf',
version='0.0.1',
url='https://github.com/kevinconway/PyPerf',
license="Apache2",
description='A service for profiling Python snippets.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=README,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'psutil',
'memory_profiler',
'daemons',
'six',
'amqp',
'stevedore',
],
entry_points={
"pyperf_messages": [
"profile_request = pyperf.messages:ProfileRequest",
"profile_response = pyperf.messages:ProfileResponse",
"profile_failure = pyperf.messages:ProfileFailure",
],
"pyperf_transports": [
"amqp = pyperf.transports.amqp:AmqpTransport",
],
"pyperf_profilers": [
"runtime = pyperf.profilers.runtime:RuntimeProfiler",
"memory_max = pyperf.profilers.memory:MaxMemoryProfiler",
"memory_min = pyperf.profilers.memory:MinMemoryProfiler",
"memory_avg = pyperf.profilers.memory:AvgMemoryProfiler",
],
},
include_package_data=True,
zip_safe=False,
)
| <commit_before>"""Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
README = f.read()
setup(
name='py-perf',
version='0.0.1',
url='https://github.com/kevinconway/PyPerf',
license="Apache2",
description='A service for profiling Python snippets.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=README,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'psutil',
'memory_profiler',
'daemons',
'six',
'amqp',
'stevedore',
],
entry_points={
"pyperf_messages": [
"profile_request = pyperf.messages:ProfileRequest",
"profile_response = pyperf.messages:ProfileResponse",
"profile_failure = pyperf.messages:ProfileFailure",
],
"pyperf_transports": [
"amqp = pyperf.transports.amqp:AmqpTransport",
],
},
include_package_data=True,
zip_safe=False,
)
<commit_msg>Add entry_point for profiler implementations<commit_after>"""Setuptools packaging configuration for pyperf."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
README = f.read()
setup(
name='py-perf',
version='0.0.1',
url='https://github.com/kevinconway/PyPerf',
license="Apache2",
description='A service for profiling Python snippets.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=README,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'psutil',
'memory_profiler',
'daemons',
'six',
'amqp',
'stevedore',
],
entry_points={
"pyperf_messages": [
"profile_request = pyperf.messages:ProfileRequest",
"profile_response = pyperf.messages:ProfileResponse",
"profile_failure = pyperf.messages:ProfileFailure",
],
"pyperf_transports": [
"amqp = pyperf.transports.amqp:AmqpTransport",
],
"pyperf_profilers": [
"runtime = pyperf.profilers.runtime:RuntimeProfiler",
"memory_max = pyperf.profilers.memory:MaxMemoryProfiler",
"memory_min = pyperf.profilers.memory:MinMemoryProfiler",
"memory_avg = pyperf.profilers.memory:AvgMemoryProfiler",
],
},
include_package_data=True,
zip_safe=False,
)
|
c656bca77d13afcfd8d6df0286b632433e08def1 | setup.py | setup.py | import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
| import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
| Add subdirs of nativeconfig package to build. | Add subdirs of nativeconfig package to build.
| Python | mit | GreatFruitOmsk/nativeconfig | import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
Add subdirs of nativeconfig package to build. | import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
| <commit_before>import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
<commit_msg>Add subdirs of nativeconfig package to build.<commit_after> | import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
| import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
Add subdirs of nativeconfig package to build.import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
| <commit_before>import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
<commit_msg>Add subdirs of nativeconfig package to build.<commit_after>import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='kulakov.ilya@gmail.com',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
|
ff921ad5b1d85dc9554dfcd9d94d96f9f80b0d2b | setup.py | setup.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
import os
import sys
from glob import glob
os.chdir(os.path.dirname(__file__))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except FileExistsError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
from __future__ import print_function, unicode_literals
import os
import sys
from glob import glob
os.chdir(os.path.dirname(os.path.abspath(__file__)))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except OSError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
| Fix compatibility with Python 2 ;) | Fix compatibility with Python 2 ;)
| Python | isc | TobiX/dotfiles,TobiX/dotfiles | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
import os
import sys
from glob import glob
os.chdir(os.path.dirname(__file__))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except FileExistsError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
Fix compatibility with Python 2 ;) | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
from __future__ import print_function, unicode_literals
import os
import sys
from glob import glob
os.chdir(os.path.dirname(os.path.abspath(__file__)))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except OSError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
import os
import sys
from glob import glob
os.chdir(os.path.dirname(__file__))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except FileExistsError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
<commit_msg>Fix compatibility with Python 2 ;)<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
from __future__ import print_function, unicode_literals
import os
import sys
from glob import glob
os.chdir(os.path.dirname(os.path.abspath(__file__)))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except OSError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
import os
import sys
from glob import glob
os.chdir(os.path.dirname(__file__))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except FileExistsError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
Fix compatibility with Python 2 ;)#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
from __future__ import print_function, unicode_literals
import os
import sys
from glob import glob
os.chdir(os.path.dirname(os.path.abspath(__file__)))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except OSError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
import os
import sys
from glob import glob
os.chdir(os.path.dirname(__file__))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except FileExistsError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
<commit_msg>Fix compatibility with Python 2 ;)<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# © 2017 qsuscs, TobiX
from __future__ import print_function, unicode_literals
import os
import sys
from glob import glob
os.chdir(os.path.dirname(os.path.abspath(__file__)))
exit = 0
for f in glob('dot.*'):
dst = os.path.expanduser('~/' + f[3:].replace(u'\u2571', '/'))
src = os.path.join(os.getcwd(), f)
src_rel = os.path.relpath(src, os.path.dirname(dst))
try:
os.makedirs(os.path.dirname(dst))
except OSError:
pass
try:
os.symlink(src_rel, dst)
except OSError:
# Broken symbolic links do not "exist"
if not os.path.exists(dst) or not os.path.samefile(src, dst):
print(dst + " exists and does not link do " + src)
exit = 1
sys.exit(exit)
|
c638dbf619030c8d207e3bfd2e711da7c6c5cdf4 | passman.py | passman.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
| #!/usr/bin/python3
# -*- coding: utf-8 -*-
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
| Clean up main a bit | Clean up main a bit
| Python | mit | regexpressyourself/passman | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
Clean up main a bit | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
| <commit_before>#!/usr/bin/python3
# -*- coding: utf-8 -*-
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
<commit_msg>Clean up main a bit<commit_after> | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
| #!/usr/bin/python3
# -*- coding: utf-8 -*-
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
Clean up main a bit#!/usr/bin/python3
# -*- coding: utf-8 -*-
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
| <commit_before>#!/usr/bin/python3
# -*- coding: utf-8 -*-
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
<commit_msg>Clean up main a bit<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
|
8111060bae0818a44dc6669bf8ae011a1e612857 | hunter/reviewsapi.py | hunter/reviewsapi.py | import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
| import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda:requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda:requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda:requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
| Remove whitespaces from lambda expression | Remove whitespaces from lambda expression
| Python | mit | anapaulagomes/reviews-assigner | import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
Remove whitespaces from lambda expression | import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda:requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda:requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda:requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
| <commit_before>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
<commit_msg>Remove whitespaces from lambda expression<commit_after> | import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda:requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda:requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda:requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
| import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
Remove whitespaces from lambda expressionimport requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda:requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda:requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda:requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
| <commit_before>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
<commit_msg>Remove whitespaces from lambda expression<commit_after>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda:requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda:requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, projects):
return self.execute(lambda:requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
|
1dbc30202bddfd4f03bdc9a8005de3c363d2ac1d | blazar/plugins/dummy_vm_plugin.py | blazar/plugins/dummy_vm_plugin.py | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| Add update_reservation to dummy plugin | Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7
| Python | apache-2.0 | ChameleonCloud/blazar,ChameleonCloud/blazar,openstack/blazar,stackforge/blazar,stackforge/blazar,openstack/blazar | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| <commit_before># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
<commit_msg>Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7<commit_after> | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| <commit_before># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
<commit_msg>Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7<commit_after># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
|
b7627255c04e51ebb28f31365cff28ee9abcd05c | openspending/ui/test/functional/test_home.py | openspending/ui/test/functional/test_home.py | from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='locale'))
| from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='set_locale'))
| Fix test for locale route generation. | Fix test for locale route generation. | Python | agpl-3.0 | CivicVision/datahub,spendb/spendb,johnjohndoe/spendb,pudo/spendb,johnjohndoe/spendb,nathanhilbert/FPA_Core,openspending/spendb,CivicVision/datahub,spendb/spendb,johnjohndoe/spendb,USStateDept/FPA_Core,USStateDept/FPA_Core,openspending/spendb,pudo/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,spendb/spendb,openspending/spendb,nathanhilbert/FPA_Core,pudo/spendb,USStateDept/FPA_Core | from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='locale'))
Fix test for locale route generation. | from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='set_locale'))
| <commit_before>from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='locale'))
<commit_msg>Fix test for locale route generation. <commit_after> | from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='set_locale'))
| from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='locale'))
Fix test for locale route generation. from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='set_locale'))
| <commit_before>from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='locale'))
<commit_msg>Fix test for locale route generation. <commit_after>from .. import ControllerTestCase, url
class TestHomeController(ControllerTestCase):
def test_index(self):
response = self.app.get(url(controller='home', action='index'))
assert 'OpenSpending' in response
def test_locale(self):
response = self.app.get(url(controller='home', action='set_locale'))
|
16d77ff6a8d20773070630e1c7abb23c66345d72 | setup.py | setup.py | from setuptools import setup
setup(name='q', version='2.4', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
| from setuptools import setup
setup(name='q', version='2.5', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
| Advance PyPI version to 2.5. | Advance PyPI version to 2.5.
| Python | apache-2.0 | zestyping/q | from setuptools import setup
setup(name='q', version='2.4', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
Advance PyPI version to 2.5. | from setuptools import setup
setup(name='q', version='2.5', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
| <commit_before>from setuptools import setup
setup(name='q', version='2.4', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
<commit_msg>Advance PyPI version to 2.5.<commit_after> | from setuptools import setup
setup(name='q', version='2.5', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
| from setuptools import setup
setup(name='q', version='2.4', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
Advance PyPI version to 2.5.from setuptools import setup
setup(name='q', version='2.5', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
| <commit_before>from setuptools import setup
setup(name='q', version='2.4', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
<commit_msg>Advance PyPI version to 2.5.<commit_after>from setuptools import setup
setup(name='q', version='2.5', py_modules=['q'],
description='Quick-and-dirty debugging output for tired programmers',
author='Ka-Ping Yee', author_email='ping@zesty.ca',
license='Apache License 2.0',
url='http://github.com/zestyping/q', classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'
])
|
7fa490cb598aca2848ce886dfc45bb8606f07e58 | backend/geonature/core/gn_profiles/models.py | backend/geonature/core/gn_profiles/models.py | from geonature.utils.env import DB
from utils_flask_sqla.serializers import serializable
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
| from flask import current_app
from geoalchemy2 import Geometry
from utils_flask_sqla.serializers import serializable
from utils_flask_sqla_geo.serializers import geoserializable
from geonature.utils.env import DB
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
@serializable
@geoserializable
class VmValidProfiles(DB.Model):
__tablename__ = "vm_valid_profiles"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
altitude_min = DB.Column(DB.Integer)
altitude_max = DB.Column(DB.Integer)
first_valid_data = DB.Column(DB.DateTime)
last_valid_data = DB.Column(DB.DateTime)
count_valid_data = DB.Column(DB.Integer) | Add VM valid profile model | Add VM valid profile model
| Python | bsd-2-clause | PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature | from geonature.utils.env import DB
from utils_flask_sqla.serializers import serializable
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
Add VM valid profile model | from flask import current_app
from geoalchemy2 import Geometry
from utils_flask_sqla.serializers import serializable
from utils_flask_sqla_geo.serializers import geoserializable
from geonature.utils.env import DB
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
@serializable
@geoserializable
class VmValidProfiles(DB.Model):
__tablename__ = "vm_valid_profiles"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
altitude_min = DB.Column(DB.Integer)
altitude_max = DB.Column(DB.Integer)
first_valid_data = DB.Column(DB.DateTime)
last_valid_data = DB.Column(DB.DateTime)
count_valid_data = DB.Column(DB.Integer) | <commit_before>from geonature.utils.env import DB
from utils_flask_sqla.serializers import serializable
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
<commit_msg>Add VM valid profile model<commit_after> | from flask import current_app
from geoalchemy2 import Geometry
from utils_flask_sqla.serializers import serializable
from utils_flask_sqla_geo.serializers import geoserializable
from geonature.utils.env import DB
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
@serializable
@geoserializable
class VmValidProfiles(DB.Model):
__tablename__ = "vm_valid_profiles"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
altitude_min = DB.Column(DB.Integer)
altitude_max = DB.Column(DB.Integer)
first_valid_data = DB.Column(DB.DateTime)
last_valid_data = DB.Column(DB.DateTime)
count_valid_data = DB.Column(DB.Integer) | from geonature.utils.env import DB
from utils_flask_sqla.serializers import serializable
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
Add VM valid profile modelfrom flask import current_app
from geoalchemy2 import Geometry
from utils_flask_sqla.serializers import serializable
from utils_flask_sqla_geo.serializers import geoserializable
from geonature.utils.env import DB
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
@serializable
@geoserializable
class VmValidProfiles(DB.Model):
__tablename__ = "vm_valid_profiles"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
altitude_min = DB.Column(DB.Integer)
altitude_max = DB.Column(DB.Integer)
first_valid_data = DB.Column(DB.DateTime)
last_valid_data = DB.Column(DB.DateTime)
count_valid_data = DB.Column(DB.Integer) | <commit_before>from geonature.utils.env import DB
from utils_flask_sqla.serializers import serializable
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
<commit_msg>Add VM valid profile model<commit_after>from flask import current_app
from geoalchemy2 import Geometry
from utils_flask_sqla.serializers import serializable
from utils_flask_sqla_geo.serializers import geoserializable
from geonature.utils.env import DB
@serializable
class VmCorTaxonPhenology(DB.Model):
__tablename__ = "vm_cor_taxon_phenology"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
period = DB.Column(DB.Integer)
id_nomenclature_life_stage = DB.Column(DB.Integer)
id_altitude_range = DB.Column(DB.Integer)
count_valid_data = DB.Column(DB.Integer)
@serializable
@geoserializable
class VmValidProfiles(DB.Model):
__tablename__ = "vm_valid_profiles"
__table_args__ = {"schema": "gn_profiles"}
cd_ref = DB.Column(DB.Integer)
valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
altitude_min = DB.Column(DB.Integer)
altitude_max = DB.Column(DB.Integer)
first_valid_data = DB.Column(DB.DateTime)
last_valid_data = DB.Column(DB.DateTime)
count_valid_data = DB.Column(DB.Integer) |
52864307e692f40ddfe170a4b8607c4b2b96bff5 | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.26',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 7.0. | Update the PyPI version to 7.0.
| Python | mit | Doist/todoist-python | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.26',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0. | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.26',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.<commit_after> | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.26',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.26',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
823179066c9a7c101a18e9c2d7ccb2a9ccf7f1cb | setup.py | setup.py | import os
from setuptools import setup, find_packages
from hamcrest import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
import re
matched = re.match('__version__.*', read(os.path.join('hamcrest', '__init__.py')))
if matched:
exec(matched.group())
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
use_2to3 = True,
)
| Support python 3 installtion; need distribute | Support python 3 installtion; need distribute
| Python | bsd-3-clause | msabramo/PyHamcrest,nitishr/PyHamcrest,nitishr/PyHamcrest,msabramo/PyHamcrest | import os
from setuptools import setup, find_packages
from hamcrest import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
)
Support python 3 installtion; need distribute | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
import re
matched = re.match('__version__.*', read(os.path.join('hamcrest', '__init__.py')))
if matched:
exec(matched.group())
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
use_2to3 = True,
)
| <commit_before>import os
from setuptools import setup, find_packages
from hamcrest import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Support python 3 installtion; need distribute<commit_after> | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
import re
matched = re.match('__version__.*', read(os.path.join('hamcrest', '__init__.py')))
if matched:
exec(matched.group())
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
use_2to3 = True,
)
| import os
from setuptools import setup, find_packages
from hamcrest import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
)
Support python 3 installtion; need distributeimport os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
import re
matched = re.match('__version__.*', read(os.path.join('hamcrest', '__init__.py')))
if matched:
exec(matched.group())
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
use_2to3 = True,
)
| <commit_before>import os
from setuptools import setup, find_packages
from hamcrest import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Support python 3 installtion; need distribute<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
import re
matched = re.match('__version__.*', read(os.path.join('hamcrest', '__init__.py')))
if matched:
exec(matched.group())
setup(
name = 'PyHamcrest',
version = __version__,
author = 'Jon Reid',
author_email = 'jon.reid@mac.com',
description = 'Hamcrest framework for matcher objects',
license = 'New BSD',
platforms=['All'],
keywords = 'hamcrest matchers pyunit unit test testing unittest unittesting',
url = 'http://code.google.com/p/hamcrest/',
download_url = 'http://pypi.python.org/packages/source/P/PyHamcrest/PyHamcrest-%s.tar.gz' % __version__,
packages = find_packages(),
test_suite = 'hamcrest-unit-test.alltests',
provides = ['hamcrest'],
long_description=read('README.md'),
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
],
use_2to3 = True,
)
|
c5b73be1bf0f0edd05c4743c2449bee568d01c76 | setup.py | setup.py | from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| Add long description from README | Add long description from README
| Python | mit | Turbasen/turbasen.py | from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
Add long description from README | from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| <commit_before>from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
<commit_msg>Add long description from README<commit_after> | from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
Add long description from READMEfrom distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| <commit_before>from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
<commit_msg>Add long description from README<commit_after>from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='ali.kaafarani@turistforeningen.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
7a80fd081c6d8ece2b199f4a7915dc59d1805437 | setup.py | setup.py | #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
| #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
| Remove support for Python 2 | Remove support for Python 2
| Python | bsd-3-clause | abarto/django_uncertainty | #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
Remove support for Python 2 | #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
| <commit_before>#!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
<commit_msg>Remove support for Python 2<commit_after> | #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
| #!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
Remove support for Python 2#!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
| <commit_before>#!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
<commit_msg>Remove support for Python 2<commit_after>#!/usr/bin/env python
from os import path
from setuptools import setup
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
long_description = f.read()
setup(name='django_uncertainty',
version='1.5',
description='A Django middleware to generate predictable errors on sites',
long_description=long_description,
author='Agustin Barto',
author_email='abarto@gmail.com',
url='https://github.com/abarto/django_uncertainty',
license='BSD',
install_requires=[],
tests_require=['Django>=1.10'],
test_suite='uncertainty.tests.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
packages=['uncertainty'])
|
89dfdc7ebe2fe483d2b306dac83a666aa7c013d7 | setup.py | setup.py | #!/usr/bin/env python
from os.path import join
from setuptools import setup, find_packages
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
| #!/usr/bin/env python
from os.path import dirname, join
from setuptools import setup, find_packages
def read(file_name):
with open(join(dirname(__file__), file_name)) as file_object:
return file_object.read()
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
long_description=read('README.rst'),
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
| Include README for PyPI release | Include README for PyPI release
| Python | mit | ExCiteS/geokey-sapelli,ExCiteS/geokey-sapelli | #!/usr/bin/env python
from os.path import join
from setuptools import setup, find_packages
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
Include README for PyPI release | #!/usr/bin/env python
from os.path import dirname, join
from setuptools import setup, find_packages
def read(file_name):
with open(join(dirname(__file__), file_name)) as file_object:
return file_object.read()
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
long_description=read('README.rst'),
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
| <commit_before>#!/usr/bin/env python
from os.path import join
from setuptools import setup, find_packages
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
<commit_msg>Include README for PyPI release<commit_after> | #!/usr/bin/env python
from os.path import dirname, join
from setuptools import setup, find_packages
def read(file_name):
with open(join(dirname(__file__), file_name)) as file_object:
return file_object.read()
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
long_description=read('README.rst'),
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
| #!/usr/bin/env python
from os.path import join
from setuptools import setup, find_packages
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
Include README for PyPI release#!/usr/bin/env python
from os.path import dirname, join
from setuptools import setup, find_packages
def read(file_name):
with open(join(dirname(__file__), file_name)) as file_object:
return file_object.read()
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
long_description=read('README.rst'),
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
| <commit_before>#!/usr/bin/env python
from os.path import join
from setuptools import setup, find_packages
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
<commit_msg>Include README for PyPI release<commit_after>#!/usr/bin/env python
from os.path import dirname, join
from setuptools import setup, find_packages
def read(file_name):
with open(join(dirname(__file__), file_name)) as file_object:
return file_object.read()
name = 'geokey-sapelli'
version = __import__(name.replace('-', '_')).__version__
repository = join('https://github.com/ExCiteS', name)
setup(
name=name,
version=version,
description='Read Sapelli project and load data from CSVs to GeoKey',
long_description=read('README.rst'),
url=repository,
download_url=join(repository, 'tarball', version),
author='ExCiteS',
author_email='excites@ucl.ac.uk',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*']),
include_package_data=True,
install_requires=['qrcode==5.2.2'],
)
|
2c7924b879be6536f0a6f0f7f78e5813156734af | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='neomodel',
version='0.4.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8', 'lucene-querybuilder==0.2'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
| from setuptools import setup, find_packages
setup(
name='neomodel',
version='1.0.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
| Remove lucene query builder and bumper version | Remove lucene query builder and bumper version
| Python | mit | bleib1dj/neomodel,fpieper/neomodel,robinedwards/neomodel,cristigociu/neomodel_dh,bleib1dj/neomodel,pombredanne/neomodel,robinedwards/neomodel,andrefsp/neomodel,wcooley/neomodel | from setuptools import setup, find_packages
setup(
name='neomodel',
version='0.4.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8', 'lucene-querybuilder==0.2'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
Remove lucene query builder and bumper version | from setuptools import setup, find_packages
setup(
name='neomodel',
version='1.0.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
| <commit_before>from setuptools import setup, find_packages
setup(
name='neomodel',
version='0.4.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8', 'lucene-querybuilder==0.2'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
<commit_msg>Remove lucene query builder and bumper version<commit_after> | from setuptools import setup, find_packages
setup(
name='neomodel',
version='1.0.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
| from setuptools import setup, find_packages
setup(
name='neomodel',
version='0.4.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8', 'lucene-querybuilder==0.2'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
Remove lucene query builder and bumper versionfrom setuptools import setup, find_packages
setup(
name='neomodel',
version='1.0.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
| <commit_before>from setuptools import setup, find_packages
setup(
name='neomodel',
version='0.4.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8', 'lucene-querybuilder==0.2'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
<commit_msg>Remove lucene query builder and bumper version<commit_after>from setuptools import setup, find_packages
setup(
name='neomodel',
version='1.0.0',
description='An object mapper for the neo4j graph database.',
long_description=open('README.rst').read(),
author='Robin Edwards',
author_email='robin.ge@gmail.com',
zip_safe=True,
url='http://github.com/robinedwards/neomodel',
license='MIT',
packages=find_packages(),
keywords='graph neo4j py2neo ORM',
tests_require=['nose==1.1.2'],
test_suite='nose.collector',
install_requires=['py2neo==1.6.1', 'pytz==2013.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Database",
])
|
69281da6f69bbdc5cfb832efa0b0c1b7810eb262 | setup.py | setup.py | # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
)
readme_file.close()
| # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
readme_file.close()
| Mark as compatible for python 2.7, 3.3 and 3.4 | Mark as compatible for python 2.7, 3.3 and 3.4
Add `classifiers` parameter to `setup` function call in `setup.py` file. | Python | mit | victor-o-silva/db_file_storage,victor-o-silva/db_file_storage | # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
)
readme_file.close()
Mark as compatible for python 2.7, 3.3 and 3.4
Add `classifiers` parameter to `setup` function call in `setup.py` file. | # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
readme_file.close()
| <commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
)
readme_file.close()
<commit_msg>Mark as compatible for python 2.7, 3.3 and 3.4
Add `classifiers` parameter to `setup` function call in `setup.py` file.<commit_after> | # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
readme_file.close()
| # -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
)
readme_file.close()
Mark as compatible for python 2.7, 3.3 and 3.4
Add `classifiers` parameter to `setup` function call in `setup.py` file.# -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
readme_file.close()
| <commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
)
readme_file.close()
<commit_msg>Mark as compatible for python 2.7, 3.3 and 3.4
Add `classifiers` parameter to `setup` function call in `setup.py` file.<commit_after># -*- coding: utf-8 -*-
from distutils.core import setup
readme_file = open('README.rst')
setup(
name='django-db-file-storage',
version='0.3.1',
author='Victor Oliveira da Silva',
author_email='victor_o_silva@hotmail.com',
packages=['db_file_storage'],
url='https://github.com/victor-o-silva/db_file_storage',
download_url='https://github.com/victor-o-silva/db_file_storage'
'/tarball/0.3.1',
license='GNU GPL v3',
description="Custom FILE_STORAGE for Django. Saves files "
"in your database instead of your file system.",
long_description=readme_file.read(),
install_requires=[
"Django",
],
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
readme_file.close()
|
b04ef68d079bf4ef172c6fa7f84946369b080cb7 | setup.py | setup.py | """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'flask',
'flask-socketio',
'tornado',
'werkzeug',
'pyzmq'
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
| """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'aiozmq',
'flask',
'flask-socketio',
'pyzmq',
'tornado',
'websockets',
'werkzeug',
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
| Add to extras_require examples section | Add to extras_require examples section
| Python | mit | bcb/jsonrpcserver | """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'flask',
'flask-socketio',
'tornado',
'werkzeug',
'pyzmq'
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
Add to extras_require examples section | """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'aiozmq',
'flask',
'flask-socketio',
'pyzmq',
'tornado',
'websockets',
'werkzeug',
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
| <commit_before>"""setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'flask',
'flask-socketio',
'tornado',
'werkzeug',
'pyzmq'
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
<commit_msg>Add to extras_require examples section<commit_after> | """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'aiozmq',
'flask',
'flask-socketio',
'pyzmq',
'tornado',
'websockets',
'werkzeug',
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
| """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'flask',
'flask-socketio',
'tornado',
'werkzeug',
'pyzmq'
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
Add to extras_require examples section"""setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'aiozmq',
'flask',
'flask-socketio',
'pyzmq',
'tornado',
'websockets',
'werkzeug',
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
| <commit_before>"""setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'flask',
'flask-socketio',
'tornado',
'werkzeug',
'pyzmq'
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
<commit_msg>Add to extras_require examples section<commit_after>"""setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
README = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
HISTORY = f.read()
setup(
name='jsonrpcserver',
version='3.4.0',
description='Process JSON-RPC requests',
long_description=README + '\n\n' + HISTORY,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcserver.readthedocs.io/',
license='MIT',
packages=['jsonrpcserver'],
package_data={'jsonrpcserver': ['request-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'six', 'funcsigs'],
extras_require={
'tox': ['tox','pylint'],
'examples': [
'aiohttp',
'aiozmq',
'flask',
'flask-socketio',
'pyzmq',
'tornado',
'websockets',
'werkzeug',
]
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
)
|
ad889be15a374ff07492b549ba454bafd3a76fd7 | setup.py | setup.py | import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
| import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
| Cut out logo in README uploaded to PyPi | Cut out logo in README uploaded to PyPi
| Python | mit | SectorLabs/django-postgres-extra | import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
Cut out logo in README uploaded to PyPi | import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
| <commit_before>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
<commit_msg>Cut out logo in README uploaded to PyPi<commit_after> | import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
| import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
Cut out logo in README uploaded to PyPiimport os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
| <commit_before>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
<commit_msg>Cut out logo in README uploaded to PyPi<commit_after>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
3ab6dbf87053634ce48627787151a24b33b546af | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.5',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.6',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 0.2.6. | Update the PyPI version to 0.2.6.
| Python | mit | Doist/todoist-python,electronick1/todoist-python | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.5',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 0.2.6. | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.6',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.5',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 0.2.6.<commit_after> | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.6',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.5',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 0.2.6.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.6',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.5',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 0.2.6.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
    """Return the contents of *fname* located next to this setup.py.

    Returns '' when the file is missing or unreadable so setup() can
    still run (e.g. README.md absent in an sdist).
    """
    try:
        # `with` closes the handle; the old version leaked the file object.
        with open(os.path.join(os.path.dirname(__file__), fname)) as f:
            return f.read()
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # SystemExit and KeyboardInterrupt.
        return ''
# Distribution metadata; published to PyPI as the todoist-python package.
setup(
    name='todoist-python',
    version='0.2.6',
    packages=['todoist', 'todoist.managers'],
    author='Doist Team',
    author_email='info@todoist.com',
    license='BSD',
    # Long description is pulled from the README; read() falls back to ''.
    description='todoist-python - The official Todoist Python API library',
    long_description = read('README.md'),
    install_requires=[
        'requests',
    ],
    # see here for complete list of classifiers
    # http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=(
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
    ),
)
|
b98b804ea83723d476f50a46e9ba4dbe8fc6e4a4 | setup.py | setup.py | from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.5',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
| from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.6',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'cffi>=1.10.0',
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
| Add cffi to the package requirements. | Add cffi to the package requirements.
| Python | mit | ApplauseOSS/kms-encryption-toolbox | from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.5',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
Add cffi to the package requirements. | from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.6',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'cffi>=1.10.0',
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
| <commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.5',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
<commit_msg>Add cffi to the package requirements.<commit_after> | from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.6',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'cffi>=1.10.0',
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
| from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.5',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
Add cffi to the package requirements.from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.6',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'cffi>=1.10.0',
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
| <commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.5',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
<commit_msg>Add cffi to the package requirements.<commit_after>from setuptools import find_packages
from setuptools import setup
setup(
name='kms-encryption-toolbox',
version='0.0.6',
url='https://github.com/ApplauseOSS/kms-encryption-toolbox',
license='Applause',
description='Encryption toolbox to be used with the Amazon Key Management Service for securing your deployment secrets. It encapsulates the aws-encryption-sdk package to expose cmdline actions.',
author='Applause',
author_email='architecture@applause.com',
zip_safe=False,
packages=find_packages(),
install_requires=[
'cffi>=1.10.0',
'aws-encryption-sdk>=1.2.0',
'click>=6.6',
'attrs>=16.3.0,<17.0.0'
],
entry_points={
"console_scripts": [
"kms-encryption = kmsencryption.__main__:main",
]
},
scripts=["kmsencryption/scripts/decrypt-and-start.sh"]
)
|
548d5b46607d1023e7b0a95c6a2995419fa06b50 | fickle/api.py | fickle/api.py | import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['PUT'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
| import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['POST'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
| Change HTTP method for validate endpoint | Change HTTP method for validate endpoint
| Python | mit | norbert/fickle | import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['PUT'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
Change HTTP method for validate endpoint | import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['POST'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
| <commit_before>import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['PUT'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
<commit_msg>Change HTTP method for validate endpoint<commit_after> | import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['POST'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
| import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['PUT'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
Change HTTP method for validate endpointimport flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['POST'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
| <commit_before>import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['PUT'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
<commit_msg>Change HTTP method for validate endpoint<commit_after>import flask
from flask import request, json
def Response(data, status = 200):
body = json.dumps(data)
return flask.Response(body, status = status, mimetype = 'application/json')
def SuccessResponse(dataset_id = None):
return Response({ 'success': True, 'id': dataset_id })
def ErrorResponse(status = 400):
return Response({ 'success': False }, status = status)
def API(name, backend):
app = flask.Flask(name)
app.config.from_object(name)
@app.route('/')
def api_root():
return SuccessResponse(backend.dataset_id)
@app.route('/load', methods=['POST'])
def api_load():
backend.load(request.json)
return SuccessResponse(backend.dataset_id)
@app.route('/fit', methods=['POST'])
def api_fit():
if not backend.loaded():
return ErrorResponse()
backend.fit()
return SuccessResponse(backend.dataset_id)
@app.route('/validate', methods=['POST'])
def api_validate():
if not backend.loaded():
return ErrorResponse()
data = backend.validate()
return Response(data)
@app.route('/predict', methods=['POST'])
def api_predict():
if not backend.trained():
return ErrorResponse()
data = backend.predict(request.json).tolist()
return Response(data)
return app
|
6007c5800ea1f59ac26417b5081323c0b40446ad | almanacbot/almanac-bot.py | almanacbot/almanac-bot.py | import json
import logging
import logging.config
import os
import sys
import config
import constants
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
configuration = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
| import json
import logging
import logging.config
import os
import sys
import config
import constants
import twitter
conf = None
twitter_api = None
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
def setup_twitter():
logging.info("Setting up Twitter API client...")
twitter_api = twitter.Api(
consumer_key=conf.config["twitter"]["consumer_key"],
consumer_secret=conf.config["twitter"]["consumer_secret"],
access_token_key=conf.config["twitter"]["access_token_key"],
access_token_secret=conf.config["twitter"]["access_token_secret"])
logging.info("Verifying Twitter API client credentials...")
twitter_api.VerifyCredentials()
logging.info("Twitter API client credentials verified.")
logging.info("Twitter API client set up.")
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
conf = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
try:
setup_twitter()
except Exception as exc:
logging.error("Error setting up Twitter API client.", exc)
sys.exit(1)
| Add Twitter API client initialization. | Add Twitter API client initialization.
| Python | mit | logoff/almanac-bot,logoff/almanac-bot | import json
import logging
import logging.config
import os
import sys
import config
import constants
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
configuration = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
Add Twitter API client initialization. | import json
import logging
import logging.config
import os
import sys
import config
import constants
import twitter
conf = None
twitter_api = None
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
def setup_twitter():
logging.info("Setting up Twitter API client...")
twitter_api = twitter.Api(
consumer_key=conf.config["twitter"]["consumer_key"],
consumer_secret=conf.config["twitter"]["consumer_secret"],
access_token_key=conf.config["twitter"]["access_token_key"],
access_token_secret=conf.config["twitter"]["access_token_secret"])
logging.info("Verifying Twitter API client credentials...")
twitter_api.VerifyCredentials()
logging.info("Twitter API client credentials verified.")
logging.info("Twitter API client set up.")
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
conf = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
try:
setup_twitter()
except Exception as exc:
logging.error("Error setting up Twitter API client.", exc)
sys.exit(1)
| <commit_before>import json
import logging
import logging.config
import os
import sys
import config
import constants
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
configuration = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
<commit_msg>Add Twitter API client initialization.<commit_after> | import json
import logging
import logging.config
import os
import sys
import config
import constants
import twitter
conf = None
twitter_api = None
def setup_logging(path='logging.json', log_level=logging.DEBUG, env_key='LOG_CFG'):
    """Configure logging from a JSON dict-config file.

    The config path may be overridden via the environment variable named
    by *env_key*. If no config file exists at the resolved path, fall
    back to logging.basicConfig at *log_level*.
    """
    override = os.getenv(env_key, None)
    if override:
        path = override
    if not os.path.exists(path):
        logging.basicConfig(level=log_level)
        return
    with open(path, 'rt') as cfg_file:
        logging.config.dictConfig(json.load(cfg_file))
def setup_twitter():
    """Build and credential-check the module-level Twitter API client.

    Reads credentials from the module-level *conf* and binds the client
    to the module-level *twitter_api*. Raises if credential verification
    fails.
    """
    # Without `global`, the assignment below created a function-local
    # variable and the module-level twitter_api stayed None.
    global twitter_api
    logging.info("Setting up Twitter API client...")
    twitter_api = twitter.Api(
        consumer_key=conf.config["twitter"]["consumer_key"],
        consumer_secret=conf.config["twitter"]["consumer_secret"],
        access_token_key=conf.config["twitter"]["access_token_key"],
        access_token_secret=conf.config["twitter"]["access_token_secret"])
    logging.info("Verifying Twitter API client credentials...")
    twitter_api.VerifyCredentials()
    logging.info("Twitter API client credentials verified.")
    logging.info("Twitter API client set up.")
if __name__ == '__main__':
    # configure logger
    setup_logging()
    # read configuration
    try:
        conf = config.Configuration(constants.CONFIG_FILE_NAME)
    except Exception:
        # The old `logging.error("msg.", exc)` passed exc as a %-format
        # argument with no placeholder, breaking log formatting;
        # logging.exception records the message plus the traceback.
        logging.exception("Error getting configuration.")
        sys.exit(1)
    # set up the Twitter API client
    try:
        setup_twitter()
    except Exception:
        logging.exception("Error setting up Twitter API client.")
        sys.exit(1)
| import json
import logging
import logging.config
import os
import sys
import config
import constants
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
configuration = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
Add Twitter API client initialization.import json
import logging
import logging.config
import os
import sys
import config
import constants
import twitter
conf = None
twitter_api = None
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
def setup_twitter():
logging.info("Setting up Twitter API client...")
twitter_api = twitter.Api(
consumer_key=conf.config["twitter"]["consumer_key"],
consumer_secret=conf.config["twitter"]["consumer_secret"],
access_token_key=conf.config["twitter"]["access_token_key"],
access_token_secret=conf.config["twitter"]["access_token_secret"])
logging.info("Verifying Twitter API client credentials...")
twitter_api.VerifyCredentials()
logging.info("Twitter API client credentials verified.")
logging.info("Twitter API client set up.")
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
conf = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
try:
setup_twitter()
except Exception as exc:
logging.error("Error setting up Twitter API client.", exc)
sys.exit(1)
| <commit_before>import json
import logging
import logging.config
import os
import sys
import config
import constants
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
configuration = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
<commit_msg>Add Twitter API client initialization.<commit_after>import json
import logging
import logging.config
import os
import sys
import config
import constants
import twitter
conf = None
twitter_api = None
def setup_logging(
path='logging.json',
log_level=logging.DEBUG,
env_key='LOG_CFG'
):
env_path = os.getenv(env_key, None)
if env_path:
path = env_path
if os.path.exists(path):
with open(path, 'rt') as f:
log_conf = json.load(f)
logging.config.dictConfig(log_conf)
else:
logging.basicConfig(level=log_level)
def setup_twitter():
logging.info("Setting up Twitter API client...")
twitter_api = twitter.Api(
consumer_key=conf.config["twitter"]["consumer_key"],
consumer_secret=conf.config["twitter"]["consumer_secret"],
access_token_key=conf.config["twitter"]["access_token_key"],
access_token_secret=conf.config["twitter"]["access_token_secret"])
logging.info("Verifying Twitter API client credentials...")
twitter_api.VerifyCredentials()
logging.info("Twitter API client credentials verified.")
logging.info("Twitter API client set up.")
if __name__ == '__main__':
# configure logger
setup_logging()
# read configuration
try:
conf = config.Configuration(constants.CONFIG_FILE_NAME)
except Exception as exc:
logging.error("Error getting configuration.", exc)
sys.exit(1)
try:
setup_twitter()
except Exception as exc:
logging.error("Error setting up Twitter API client.", exc)
sys.exit(1)
|
ae966a3cb7f99e5604c8302680f125e12087003a | blackhole/state.py | blackhole/state.py |
class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
def get_reading(self):
return self._reading_data
| class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
@property
def reading(self):
return self._reading_data
| Change getter in to property | Change getter in to property | Python | mit | kura/blackhole,kura/blackhole |
class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
def get_reading(self):
return self._reading_data
Change getter in to property | class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
@property
def reading(self):
return self._reading_data
| <commit_before>
class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
def get_reading(self):
return self._reading_data
<commit_msg>Change getter in to property<commit_after> | class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
@property
def reading(self):
return self._reading_data
|
class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
def get_reading(self):
return self._reading_data
Change getter in to propertyclass MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
@property
def reading(self):
return self._reading_data
| <commit_before>
class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
def get_reading(self):
return self._reading_data
<commit_msg>Change getter in to property<commit_after>class MailState():
_reading_data = False
def set_reading(self, val):
self._reading_data = val
@property
def reading(self):
return self._reading_data
|
e50fc12459e6ff77864fe499b512a57e89f7ead2 | pi_control_service/gpio_service.py | pi_control_service/gpio_service.py | from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except:
pass
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
| from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
| Send exception message in response | Send exception message in response
| Python | mit | projectweekend/Pi-Control-Service,HydAu/ProjectWeekds_Pi-Control-Service | from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except:
pass
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
Send exception message in response | from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
| <commit_before>from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except:
pass
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
<commit_msg>Send exception message in response<commit_after> | from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
| from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except:
pass
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
Send exception message in responsefrom rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
| <commit_before>from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except:
pass
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
<commit_msg>Send exception message in response<commit_after>from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
|
fe4fec66cbf4100752c4b7414090019ab8ddb8ce | ideascube/conf/idb_bdi.py | ideascube/conf/idb_bdi.py | """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
| """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
| Add cards for Ideasbox in Burundi | Add cards for Ideasbox in Burundi
| Python | agpl-3.0 | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube | """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
Add cards for Ideasbox in Burundi | """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
| <commit_before>"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
<commit_msg>Add cards for Ideasbox in Burundi<commit_after> | """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
| """Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
Add cards for Ideasbox in Burundi"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
| <commit_before>"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
<commit_msg>Add cards for Ideasbox in Burundi<commit_after>"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
|
d75a79d10658ad32a9b1d71e472372d8335c7bb6 | ml/test_amaranth_lib.py | ml/test_amaranth_lib.py | # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_load_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
| # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_combine_dataframes(self):
raise NotImplementedError
def test_get_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
| Update testing stubs with helper lib changes | Update testing stubs with helper lib changes
| Python | apache-2.0 | googleinterns/amaranth,googleinterns/amaranth | # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_load_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
Update testing stubs with helper lib changes | # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_combine_dataframes(self):
raise NotImplementedError
def test_get_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
| <commit_before># Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_load_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
<commit_msg>Update testing stubs with helper lib changes<commit_after> | # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_combine_dataframes(self):
raise NotImplementedError
def test_get_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
| # Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_load_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
Update testing stubs with helper lib changes# Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_combine_dataframes(self):
raise NotImplementedError
def test_get_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
| <commit_before># Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_load_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
<commit_msg>Update testing stubs with helper lib changes<commit_after># Lint as: python3
"""These tests ensure correctness for the helper functions in amaranth_lib."""
import unittest
class TestAmaranthHelpers(unittest.TestCase):
def test_combine_dataframes(self):
raise NotImplementedError
def test_get_calorie_data(self):
raise NotImplementedError
def test_clean_data(self):
raise NotImplementedError
def test_add_calorie_labels(self):
raise NotImplementedError
def test_num_unique_words(self):
raise NotImplementedError
def test_max_sequence_length(self):
raise NotImplementedError
def test_add_input_labels(self):
raise NotImplementedError
if __name__ == '__main__':
unittest.main()
|
3bc52b94479e3b0e147ff6ef4bcc8379a5b57249 | trunk/mobile_portal/mobile_portal/core/middleware.py | trunk/mobile_portal/mobile_portal/core/middleware.py | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'opera_mini_ver1' in device_parents[request.browser.devid]:
opera_device = request.META.get('HTTP_X_OPERAMINI_PHONE')
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'HTTP_X_OPERAMINI_PHONE' in request.META:
opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response | Fix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header. | Fix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header.
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'opera_mini_ver1' in device_parents[request.browser.devid]:
opera_device = request.META.get('HTTP_X_OPERAMINI_PHONE')
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return responseFix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header. | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'HTTP_X_OPERAMINI_PHONE' in request.META:
opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response | <commit_before>from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
    # NOTE(review): despite the name, this middleware does device
    # detection, not geolocation: it resolves the requesting handset
    # against the wurfl device database and annotates the request.

    # Shared matcher built once at import time from the wurfl device set
    # and reused for every request.
    vsa = VectorSpaceAlgorithm(devices)

    def process_request(self, request):
        # Attaches two attributes to the request:
        #   request.browser - device matched from the raw User-Agent
        #   request.device  - best guess at the actual handset
        ua = request.META.get('HTTP_USER_AGENT', '')
        try:
            request.browser = devices.select_ua(
                request.META['HTTP_USER_AGENT'],
                search=LocationMiddleware.vsa
            )
        except (KeyError, DeviceNotFound):
            # Missing User-Agent header (KeyError) or no match in the
            # device database: fall back to a generic XHTML profile.
            request.browser = devices.select_id('generic_xhtml')
        # Fix: branch on the forwarded-phone header actually being present
        # rather than on the wurfl 'opera_mini_ver1' hierarchy. The old
        # check passed opera_device=None into select_ua whenever a device
        # classified as Opera Mini arrived without an X-OperaMini-Phone
        # header (and skipped the header entirely for other proxies).
        if 'HTTP_X_OPERAMINI_PHONE' in request.META:
            opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
            request.device = devices.select_ua(
                opera_device,
                search=LocationMiddleware.vsa
            )
        else:
            request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response<commit_msg>Fix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header.<commit_after> | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'HTTP_X_OPERAMINI_PHONE' in request.META:
opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response | from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'opera_mini_ver1' in device_parents[request.browser.devid]:
opera_device = request.META.get('HTTP_X_OPERAMINI_PHONE')
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return responseFix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header.from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'HTTP_X_OPERAMINI_PHONE' in request.META:
opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response | <commit_before>from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'opera_mini_ver1' in device_parents[request.browser.devid]:
opera_device = request.META.get('HTTP_X_OPERAMINI_PHONE')
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response<commit_msg>Fix bug when Opera Mini (and possibly others) present no X-OperaMini-Phone header.<commit_after>from django.conf import settings
import geolocation
from mobile_portal.wurfl.wurfl_data import devices
from mobile_portal.wurfl import device_parents
from pywurfl.algorithms import DeviceNotFound
from mobile_portal.wurfl.vsm import VectorSpaceAlgorithm
class LocationMiddleware(object):
vsa = VectorSpaceAlgorithm(devices)
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
try:
request.browser = devices.select_ua(
request.META['HTTP_USER_AGENT'],
search=LocationMiddleware.vsa
)
except (KeyError, DeviceNotFound):
request.browser = devices.select_id('generic_xhtml')
if 'HTTP_X_OPERAMINI_PHONE' in request.META:
opera_device = request.META['HTTP_X_OPERAMINI_PHONE']
request.device = devices.select_ua(
opera_device,
search=LocationMiddleware.vsa
)
else:
request.device = request.browser
from django.db import connection
class PrintQueriesMiddleware(object):
def process_response(self, request, response):
for query in connection.queries:
print '-'*80
print query['sql']
return response |
ef81886e4ecf08c12783e0cc2b934ed812accb97 | Zika_vdb_upload.py | Zika_vdb_upload.py | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
    # Thin Zika-specific wrapper around the generic vdb_upload uploader;
    # all behaviour comes from the base class, only the fasta field
    # mapping supplied by the caller differs.
    def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
        '''
        Delegate straight to vdb_upload.__init__; the parameter list must
        mirror the base class exactly.

        :param fasta_fields: Dictionary defining position in fasta field to be included in database
        :param fasta_fname: name of the fasta file to upload
        :param database: target rethinkdb database name
        :param virus: virus name used for the table/records
        :param source: provenance of the sequences (e.g. a repository name)
        '''
        vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__ == "__main__":
    args = parser.parse_args()
    # ViPR/BRC fasta headers are pipe-delimited like:
    #   >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
    #    0        1          2                             3  4       5     6
    # so accession/strain/date/country live at positions 0/2/4/6. The old
    # mapping (1:'strain', 2:'date', 4:'country', 5:'division',
    # 6:'location') labelled the virus name as the strain and shifted
    # every later field.
    fasta_fields = {0: 'accession', 2: 'strain', 4: 'date', 6: 'country'}
    run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 2:'strain', 4:'date', 6:'country'}
# 0 1 2 3 4 5 6
# >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() | Modify Zika fasta fields to match default VIPRBRC ordering. | Modify Zika fasta fields to match default VIPRBRC ordering.
| Python | agpl-3.0 | blab/nextstrain-db,nextstrain/fauna,blab/nextstrain-db,nextstrain/fauna | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 1:'strain', 2:'date', 4:'country', 5:'division', 6:'location'}
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload()Modify Zika fasta fields to match default VIPRBRC ordering. | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 2:'strain', 4:'date', 6:'country'}
# 0 1 2 3 4 5 6
# >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() | <commit_before>import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 1:'strain', 2:'date', 4:'country', 5:'division', 6:'location'}
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload()<commit_msg>Modify Zika fasta fields to match default VIPRBRC ordering.<commit_after> | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 2:'strain', 4:'date', 6:'country'}
# 0 1 2 3 4 5 6
# >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() | import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 1:'strain', 2:'date', 4:'country', 5:'division', 6:'location'}
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload()Modify Zika fasta fields to match default VIPRBRC ordering.import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 2:'strain', 4:'date', 6:'country'}
# 0 1 2 3 4 5 6
# >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() | <commit_before>import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 1:'strain', 2:'date', 4:'country', 5:'division', 6:'location'}
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload()<commit_msg>Modify Zika fasta fields to match default VIPRBRC ordering.<commit_after>import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from vdb_upload import vdb_upload
from vdb_upload import parser
class Zika_vdb_upload(vdb_upload):
def __init__(self, fasta_fields, fasta_fname, database, virus, source, locus=None, vsubtype=None, authors=None, path=None, auth_key=None):
'''
:param fasta_fields: Dictionary defining position in fasta field to be included in database
'''
vdb_upload.__init__(self, fasta_fields, fasta_fname, database, virus, source, locus, vsubtype, authors, path, auth_key)
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = {0:'accession', 2:'strain', 4:'date', 6:'country'}
# 0 1 2 3 4 5 6
# >KU647676|Zika_virus|MRS_OPY_Martinique_PaRi_2015|NA|2015_12|Human|Martinique
run = Zika_vdb_upload(fasta_fields, fasta_fname=args.fname, database=args.database, virus=args.virus, source=args.source, locus=args.locus, vsubtype=args.subtype, authors=args.authors, path=args.path)
run.upload() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.