commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3082fb50c028979643d479c0e65cb92ef12de586 | hunter/udacity.py | hunter/udacity.py | import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
raw_response = requests.get(self.certifications_url, headers=self.headers)
try:
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
| import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
try:
raw_response = requests.get(self.certifications_url, headers=self.headers)
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
| Add request to inside of try | Add request to inside of try
| Python | mit | anapaulagomes/reviews-assigner | import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
raw_response = requests.get(self.certifications_url, headers=self.headers)
try:
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
Add request to inside of try | import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
try:
raw_response = requests.get(self.certifications_url, headers=self.headers)
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
| <commit_before>import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
raw_response = requests.get(self.certifications_url, headers=self.headers)
try:
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
<commit_msg>Add request to inside of try<commit_after> | import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
try:
raw_response = requests.get(self.certifications_url, headers=self.headers)
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
| import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
raw_response = requests.get(self.certifications_url, headers=self.headers)
try:
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
Add request to inside of tryimport requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
try:
raw_response = requests.get(self.certifications_url, headers=self.headers)
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
| <commit_before>import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
raw_response = requests.get(self.certifications_url, headers=self.headers)
try:
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
<commit_msg>Add request to inside of try<commit_after>import requests
import os
class UnauthorizedToken(Exception):
pass
class UdacityConnection:
def __init__(self):
self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
token = os.environ.get('UDACITY_AUTH_TOKEN')
self.headers = {'Authorization': token, 'Content-Length': '0'}
def certifications(self):
try:
raw_response = requests.get(self.certifications_url, headers=self.headers)
response = raw_response.json()
certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
return certifications_list
except requests.exceptions.HTTPError:
raise UnauthorizedToken
|
047f061514740d7935cce8e2aa25ef138b93f91f | deployer/__init__.py | deployer/__init__.py | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.11'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.4.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| Prepare for next development version | Prepare for next development version
| Python | mit | totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.11'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Prepare for next development version | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.4.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| <commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.11'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Prepare for next development version<commit_after> | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.4.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.11'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Prepare for next development versionfrom __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.4.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| <commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.11'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Prepare for next development version<commit_after>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.4.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
3e37885f7241b985740bf753ca237c31497ac57e | courriers/management/commands/mailjet_sync_unsubscribed.py | courriers/management/commands/mailjet_sync_unsubscribed.py | from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = []
for contact in mailjet_contacts['result']:
mailjet_users.append(contact['email'])
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
| from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
| Use list comprehension for mailjet_users list | Use list comprehension for mailjet_users list
| Python | mit | ulule/django-courriers,ulule/django-courriers | from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = []
for contact in mailjet_contacts['result']:
mailjet_users.append(contact['email'])
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
Use list comprehension for mailjet_users list | from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
| <commit_before>from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = []
for contact in mailjet_contacts['result']:
mailjet_users.append(contact['email'])
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
<commit_msg>Use list comprehension for mailjet_users list<commit_after> | from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
| from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = []
for contact in mailjet_contacts['result']:
mailjet_users.append(contact['email'])
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
Use list comprehension for mailjet_users listfrom django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
| <commit_before>from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = []
for contact in mailjet_contacts['result']:
mailjet_users.append(contact['email'])
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
<commit_msg>Use list comprehension for mailjet_users list<commit_after>from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
|
fa089cabcc3b3aa71bcc86e028066e3dbcf25d5d | commands/cmd_help.py | commands/cmd_help.py | from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', '.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
| from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', /'.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
| Add slash to all command aliases | Add slash to all command aliases
| Python | agpl-3.0 | TheReverend403/Pyper,TheReverend403/Pyper | from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', '.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
Add slash to all command aliases | from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', /'.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
| <commit_before>from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', '.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
<commit_msg>Add slash to all command aliases<commit_after> | from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', /'.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
| from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', '.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
Add slash to all command aliasesfrom lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', /'.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
| <commit_before>from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', '.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
<commit_msg>Add slash to all command aliases<commit_after>from lib.command import Command
class HelpCommand(Command):
name = 'help'
aliases = ['start']
description = 'Lists all bot commands and their descriptions.'
def run(self, message, args):
reply = 'Hi! I\'m {0} and these are my commands: \n\n'.format(self.bot.telegram.get_me().username)
for command in self.bot.commands.values():
reply += '/{0} - {1}'.format(command.name, command.description)
if hasattr(command, 'aliases'):
reply += ' (Aliases: /{0})'.format(', /'.join(command.aliases))
reply += '\n'
reply += '\nYou can find my source code at https://github.com/TheReverend403/Piper'
self.reply(message, reply)
|
ce3028f29e40ce72693394798ca0a8daa98c1a4a | data/make_hash_dict.py | data/make_hash_dict.py | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict
if __name__ == "__main__":
hash_dict = make_hash_dict("ds005")
with open("ds005_hashes.json", "x", newline = "\n") as outfile:
json.dump(hash_dict, outfile) | Make JSON containing paths and hashs | WIP: Make JSON containing paths and hashs
| Python | bsd-3-clause | berkeley-stat159/project-delta | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dictWIP: Make JSON containing paths and hashs | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict
if __name__ == "__main__":
hash_dict = make_hash_dict("ds005")
with open("ds005_hashes.json", "x", newline = "\n") as outfile:
json.dump(hash_dict, outfile) | <commit_before>import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict<commit_msg>WIP: Make JSON containing paths and hashs<commit_after> | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict
if __name__ == "__main__":
hash_dict = make_hash_dict("ds005")
with open("ds005_hashes.json", "x", newline = "\n") as outfile:
json.dump(hash_dict, outfile) | import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dictWIP: Make JSON containing paths and hashsimport data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict
if __name__ == "__main__":
hash_dict = make_hash_dict("ds005")
with open("ds005_hashes.json", "x", newline = "\n") as outfile:
json.dump(hash_dict, outfile) | <commit_before>import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict<commit_msg>WIP: Make JSON containing paths and hashs<commit_after>import data, json, os, sys
def make_hash_dict(top):
"""
Returns a dictionary with file paths and corresponding hashes.
Parameters
----------
data : str
The path to the directory containing files needing hashes.
Returns
-------
hash_dict : dict
Dictionary with file paths as keys and hashes as values.
"""
paths = [os.path.join(root, files) for root, dirs, files in os.walk(top)]
# generate_file_md5() takes as input a file path and outputs its hash
hash_dict = [data.generate_file_md5(path) for path in paths]
return hash_dict
if __name__ == "__main__":
hash_dict = make_hash_dict("ds005")
with open("ds005_hashes.json", "x", newline = "\n") as outfile:
json.dump(hash_dict, outfile) |
8660d5570144894cf4e6e07b3a30526b35575dce | test/Analysis/analyzer_test.py | test/Analysis/analyzer_test.py | import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
    """Shell-test format for static analyzer tests.

    Each test is executed with the range constraint manager; when the z3
    solver is built into clang, the test is executed a second time with
    the z3 constraint manager.
    """

    def execute(self, test, litConfig):
        # First pass: the default range-based constraint manager.
        outcome = self.executeWithAnalyzeSubstitution(
            test, litConfig, '-analyzer-constraints=range')

        if outcome.code == lit.Test.FAIL:
            return outcome

        # Second pass under z3, when the backend is available.
        if test.config.clang_staticanalyzer_z3 == '1':
            outcome = self.executeWithAnalyzeSubstitution(
                test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3')

        return outcome

    def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
        # Temporarily bind %analyze to the requested analyzer flags,
        # run the test, then restore the original substitution list.
        previous_substitutions = list(test.config.substitutions)
        test.config.substitutions.append(('%analyze', substitution))
        outcome = lit.TestRunner.executeShTest(test, litConfig,
            self.execute_external)
        test.config.substitutions = previous_substitutions
        return outcome
| import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
    """Shell-test format for static analyzer tests.

    Runs each test under the range constraint manager and, when the z3
    solver is built into clang, once more under z3; the outputs of all
    runs are concatenated into the returned result so a failure shows
    every run.  A test that requires z3 is skipped entirely when z3 is
    unavailable.
    """

    def execute(self, test, litConfig):
        results = []
        # Parse any test requirements ('REQUIRES: ') — presumably this
        # populates test.requires, which is consulted below; confirm
        # against lit.TestRunner.
        saved_test = test
        lit.TestRunner.parseIntegratedTestScript(test)
        if 'z3' not in test.requires:
            # Default run with the range-based constraint manager.
            results.append(self.executeWithAnalyzeSubstitution(
                saved_test, litConfig, '-analyzer-constraints=range'))
            if results[-1].code == lit.Test.FAIL:
                return results[-1]
        # If z3 backend available, add an additional run line for it
        if test.config.clang_staticanalyzer_z3 == '1':
            results.append(self.executeWithAnalyzeSubstitution(
                saved_test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3'))
        # Combine all result outputs into the last element
        for x in results:
            if x != results[-1]:
                results[-1].output = x.output + results[-1].output
        if results:
            return results[-1]
        # Nothing ran: the test required z3 but z3 is not built in.
        return lit.Test.Result(lit.Test.UNSUPPORTED,
            "Test requires the following unavailable features: z3")

    def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
        # Temporarily bind %analyze to the given analyzer flags, run the
        # test, then restore the original substitution list.
        saved_substitutions = list(test.config.substitutions)
        test.config.substitutions.append(('%analyze', substitution))
        result = lit.TestRunner.executeShTest(test, litConfig,
            self.execute_external)
        test.config.substitutions = saved_substitutions
        return result
| Improve test handling with multiple constraint managers | [analyzer]: Improve test handling with multiple constraint managers
Summary: Modify the test infrastructure to properly handle tests that require z3, and merge together the output of all tests on success. This is required for D28954.
Reviewers: dcoughlin, zaks.anna, NoQ, xazax.hun
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D33308
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@305480 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang | import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=range')
if result.code == lit.Test.FAIL:
return result
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3')
return result
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
[analyzer]: Improve test handling with multiple constraint managers
Summary: Modify the test infrastructure to properly handle tests that require z3, and merge together the output of all tests on success. This is required for D28954.
Reviewers: dcoughlin, zaks.anna, NoQ, xazax.hun
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D33308
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@305480 91177308-0d34-0410-b5e6-96231b3b80d8 | import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
results = []
# Parse any test requirements ('REQUIRES: ')
saved_test = test
lit.TestRunner.parseIntegratedTestScript(test)
if 'z3' not in test.requires:
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=range'))
if results[-1].code == lit.Test.FAIL:
return results[-1]
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3'))
# Combine all result outputs into the last element
for x in results:
if x != results[-1]:
results[-1].output = x.output + results[-1].output
if results:
return results[-1]
return lit.Test.Result(lit.Test.UNSUPPORTED,
"Test requires the following unavailable features: z3")
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
| <commit_before>import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=range')
if result.code == lit.Test.FAIL:
return result
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3')
return result
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
<commit_msg>[analyzer]: Improve test handling with multiple constraint managers
Summary: Modify the test infrastructure to properly handle tests that require z3, and merge together the output of all tests on success. This is required for D28954.
Reviewers: dcoughlin, zaks.anna, NoQ, xazax.hun
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D33308
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@305480 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after> | import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
results = []
# Parse any test requirements ('REQUIRES: ')
saved_test = test
lit.TestRunner.parseIntegratedTestScript(test)
if 'z3' not in test.requires:
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=range'))
if results[-1].code == lit.Test.FAIL:
return results[-1]
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3'))
# Combine all result outputs into the last element
for x in results:
if x != results[-1]:
results[-1].output = x.output + results[-1].output
if results:
return results[-1]
return lit.Test.Result(lit.Test.UNSUPPORTED,
"Test requires the following unavailable features: z3")
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
| import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=range')
if result.code == lit.Test.FAIL:
return result
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3')
return result
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
[analyzer]: Improve test handling with multiple constraint managers
Summary: Modify the test infrastructure to properly handle tests that require z3, and merge together the output of all tests on success. This is required for D28954.
Reviewers: dcoughlin, zaks.anna, NoQ, xazax.hun
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D33308
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@305480 91177308-0d34-0410-b5e6-96231b3b80d8import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
results = []
# Parse any test requirements ('REQUIRES: ')
saved_test = test
lit.TestRunner.parseIntegratedTestScript(test)
if 'z3' not in test.requires:
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=range'))
if results[-1].code == lit.Test.FAIL:
return results[-1]
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3'))
# Combine all result outputs into the last element
for x in results:
if x != results[-1]:
results[-1].output = x.output + results[-1].output
if results:
return results[-1]
return lit.Test.Result(lit.Test.UNSUPPORTED,
"Test requires the following unavailable features: z3")
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
| <commit_before>import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=range')
if result.code == lit.Test.FAIL:
return result
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
result = self.executeWithAnalyzeSubstitution(
test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3')
return result
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
<commit_msg>[analyzer]: Improve test handling with multiple constraint managers
Summary: Modify the test infrastructure to properly handle tests that require z3, and merge together the output of all tests on success. This is required for D28954.
Reviewers: dcoughlin, zaks.anna, NoQ, xazax.hun
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D33308
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@305480 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>import lit.formats
import lit.TestRunner
# Custom format class for static analyzer tests
class AnalyzerTest(lit.formats.ShTest):
def execute(self, test, litConfig):
results = []
# Parse any test requirements ('REQUIRES: ')
saved_test = test
lit.TestRunner.parseIntegratedTestScript(test)
if 'z3' not in test.requires:
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=range'))
if results[-1].code == lit.Test.FAIL:
return results[-1]
# If z3 backend available, add an additional run line for it
if test.config.clang_staticanalyzer_z3 == '1':
results.append(self.executeWithAnalyzeSubstitution(
saved_test, litConfig, '-analyzer-constraints=z3 -DANALYZER_CM_Z3'))
# Combine all result outputs into the last element
for x in results:
if x != results[-1]:
results[-1].output = x.output + results[-1].output
if results:
return results[-1]
return lit.Test.Result(lit.Test.UNSUPPORTED,
"Test requires the following unavailable features: z3")
def executeWithAnalyzeSubstitution(self, test, litConfig, substitution):
saved_substitutions = list(test.config.substitutions)
test.config.substitutions.append(('%analyze', substitution))
result = lit.TestRunner.executeShTest(test, litConfig,
self.execute_external)
test.config.substitutions = saved_substitutions
return result
|
ab1ea39c020e39bdc7b739e2ab49b1cacbecd081 | detectem/ws.py | detectem/ws.py | import sys
import json
from detectem.exceptions import SplashError, NoPluginsError
try:
from bottle import run, post, request
except ImportError:
print("Install bottle to use the web service ..")
sys.exit(0)
from detectem.cli import get_detection_results
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
run(host='0.0.0.0', port=5723)
| import sys
import json
try:
import bottle
from bottle import run, post, request
except ImportError:
print('[+] Install bottle to use the web service')
sys.exit(0)
from detectem.exceptions import SplashError, NoPluginsError
from detectem.cli import get_detection_results
from detectem.settings import DEBUG
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
def main():
bottle.debug(DEBUG)
run(host='0.0.0.0', port=5723)
if __name__ == '__main__':
main()
| Add debug mode to webservice | Add debug mode to webservice
Add main function to avoid executing the webserver in tests
Reorder imports
| Python | mit | spectresearch/detectem | import sys
import json
from detectem.exceptions import SplashError, NoPluginsError
try:
from bottle import run, post, request
except ImportError:
print("Install bottle to use the web service ..")
sys.exit(0)
from detectem.cli import get_detection_results
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
run(host='0.0.0.0', port=5723)
Add debug mode to webservice
Add main function to avoid executing the webserver in tests
Reorder imports | import sys
import json
try:
import bottle
from bottle import run, post, request
except ImportError:
print('[+] Install bottle to use the web service')
sys.exit(0)
from detectem.exceptions import SplashError, NoPluginsError
from detectem.cli import get_detection_results
from detectem.settings import DEBUG
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
def main():
bottle.debug(DEBUG)
run(host='0.0.0.0', port=5723)
if __name__ == '__main__':
main()
| <commit_before>import sys
import json
from detectem.exceptions import SplashError, NoPluginsError
try:
from bottle import run, post, request
except ImportError:
print("Install bottle to use the web service ..")
sys.exit(0)
from detectem.cli import get_detection_results
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
run(host='0.0.0.0', port=5723)
<commit_msg>Add debug mode to webservice
Add main function to avoid executing the webserver in tests
Reorder imports<commit_after> | import sys
import json
try:
import bottle
from bottle import run, post, request
except ImportError:
print('[+] Install bottle to use the web service')
sys.exit(0)
from detectem.exceptions import SplashError, NoPluginsError
from detectem.cli import get_detection_results
from detectem.settings import DEBUG
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
def main():
bottle.debug(DEBUG)
run(host='0.0.0.0', port=5723)
if __name__ == '__main__':
main()
| import sys
import json
from detectem.exceptions import SplashError, NoPluginsError
try:
from bottle import run, post, request
except ImportError:
print("Install bottle to use the web service ..")
sys.exit(0)
from detectem.cli import get_detection_results
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
run(host='0.0.0.0', port=5723)
Add debug mode to webservice
Add main function to avoid executing the webserver in tests
Reorder importsimport sys
import json
try:
import bottle
from bottle import run, post, request
except ImportError:
print('[+] Install bottle to use the web service')
sys.exit(0)
from detectem.exceptions import SplashError, NoPluginsError
from detectem.cli import get_detection_results
from detectem.settings import DEBUG
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
def main():
bottle.debug(DEBUG)
run(host='0.0.0.0', port=5723)
if __name__ == '__main__':
main()
| <commit_before>import sys
import json
from detectem.exceptions import SplashError, NoPluginsError
try:
from bottle import run, post, request
except ImportError:
print("Install bottle to use the web service ..")
sys.exit(0)
from detectem.cli import get_detection_results
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
run(host='0.0.0.0', port=5723)
<commit_msg>Add debug mode to webservice
Add main function to avoid executing the webserver in tests
Reorder imports<commit_after>import sys
import json
try:
import bottle
from bottle import run, post, request
except ImportError:
print('[+] Install bottle to use the web service')
sys.exit(0)
from detectem.exceptions import SplashError, NoPluginsError
from detectem.cli import get_detection_results
from detectem.settings import DEBUG
@post('/detect')
def do_detection():
url = request.forms.get('url')
metadata = request.forms.get('metadata')
metadata = bool(metadata == '1')
try:
result = get_detection_results(url, metadata=metadata)
except (SplashError, NoPluginsError) as e:
result = {'error': e}
return json.dumps(result)
def main():
bottle.debug(DEBUG)
run(host='0.0.0.0', port=5723)
if __name__ == '__main__':
main()
|
7266d78fa491acc5d2000bdf14cb8bc8dbc8c7f4 | test/testfunctions/__init__.py | test/testfunctions/__init__.py | # -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
| # -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'marker',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
| Add 'marker' to the list of tests to be run | Add 'marker' to the list of tests to be run
| Python | mit | nschloe/matplotlib2tikz,danielhkl/matplotlib2tikz,m-rossi/matplotlib2tikz,dougnd/matplotlib2tikz | # -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
Add 'marker' to the list of tests to be run | # -*- coding: utf-8 -*-
#
# Module names of the individual plotting test cases; each entry is a
# submodule exercised by the test runner.
__all__ = [
    'annotate',
    'basic_sin',
    'circle_patch',
    'dual_axis',
    'errorband',
    'errorbar',
    'fancybox',
    'heat',
    'image_plot',
    'legends2',
    'legends',
    'loglogplot',
    'logplot',
    'marker',
    'noise',
    'noise2',
    'patches',
    'scatter',
    'subplots',
    'subplot4x4',
    'text_overlay'
    ]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
| <commit_before># -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
<commit_msg>Add 'marker' to the list of tests to be run<commit_after> | # -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'marker',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
| # -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
Add 'marker' to the list of tests to be run# -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'marker',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
| <commit_before># -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
<commit_msg>Add 'marker' to the list of tests to be run<commit_after># -*- coding: utf-8 -*-
#
__all__ = [
'annotate',
'basic_sin',
'circle_patch',
'dual_axis',
'errorband',
'errorbar',
'fancybox',
'heat',
'image_plot',
'legends2',
'legends',
'loglogplot',
'logplot',
'marker',
'noise',
'noise2',
'patches',
'scatter',
'subplots',
'subplot4x4',
'text_overlay'
]
# Failing:
# TODO resurrect
# 'colorbars',
# 'histogram',
|
53025df032e1b9296713f3d8f7866e9936ed1af7 | qsimcirq/_version.py | qsimcirq/_version.py | """The version number defined here is read automatically in setup.py."""
__version__ = "0.12.1"
| """The version number defined here is read automatically in setup.py."""
# PEP 440 development version; read automatically by setup.py (see the
# module docstring above).
__version__ = "0.12.2.dev20220422"
| Update to dev version 2022-04-22 | Update to dev version 2022-04-22 | Python | apache-2.0 | quantumlib/qsim,quantumlib/qsim,quantumlib/qsim,quantumlib/qsim | """The version number defined here is read automatically in setup.py."""
__version__ = "0.12.1"
Update to dev version 2022-04-22 | """The version number defined here is read automatically in setup.py."""
__version__ = "0.12.2.dev20220422"
| <commit_before>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.12.1"
<commit_msg>Update to dev version 2022-04-22<commit_after> | """The version number defined here is read automatically in setup.py."""
__version__ = "0.12.2.dev20220422"
| """The version number defined here is read automatically in setup.py."""
__version__ = "0.12.1"
Update to dev version 2022-04-22"""The version number defined here is read automatically in setup.py."""
__version__ = "0.12.2.dev20220422"
| <commit_before>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.12.1"
<commit_msg>Update to dev version 2022-04-22<commit_after>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.12.2.dev20220422"
|
3f65b43bce12739af8bb3dfc451a7f58a6af12b1 | dbaas/api/environment.py | dbaas/api/environment.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical import models
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = models.Environment
fields = ('url', 'id', 'name',)
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
serializer_class = EnvironmentSerializer
queryset = models.Environment.objects.all()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical.models import Environment
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Environment exposing url, id, name,
    stage and provisioner."""
    class Meta:
        model = Environment
        fields = ('url', 'id', 'name', 'stage', 'provisioner')
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
    """
    Environment API

    Read-only endpoints for Environment.  Supports filtering on id, name,
    stage and provisioner, plus the special query parameter
    ``get_provisioner_by_label``, which resolves a provisioner label to
    the matching numeric constant on the model.
    """
    model = Environment
    serializer_class = EnvironmentSerializer
    queryset = Environment.objects.all()
    filter_fields = (
        'id',
        'name',
        'stage',
        'provisioner'
    )

    def get_queryset(self):
        """Build the queryset from the request's GET parameters.

        Returns an empty queryset when an unknown provisioner label is
        supplied; parameters outside ``filter_fields`` are ignored.
        """
        params = self.request.GET.dict()
        filter_params = {}
        # .items() (not the Python-2-only .iteritems()) iterates the same
        # key/value pairs and keeps this working on Python 3 as well.
        for k, v in params.items():
            if k == 'get_provisioner_by_label':
                # Label -> numeric id via an upper-cased constant on the
                # model (assumes labels match the constant names — TODO
                # confirm against Environment's definition).
                if hasattr(self.model, v.upper()):
                    label_id = getattr(self.model, v.upper())
                    filter_params['provisioner'] = label_id
                else:
                    return self.model.objects.none()
            elif k.split('__')[0] in self.filter_fields:
                filter_params[k] = v
        return self.model.objects.filter(**filter_params)
 | Add provisioner and stage on env API | Add provisioner and stage on env API
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical import models
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = models.Environment
fields = ('url', 'id', 'name',)
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
serializer_class = EnvironmentSerializer
queryset = models.Environment.objects.all()
Add povisioner and stage on env API | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical.models import Environment
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Environment
fields = ('url', 'id', 'name', 'stage', 'provisioner')
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
model = Environment
serializer_class = EnvironmentSerializer
queryset = Environment.objects.all()
filter_fields = (
'id',
'name',
'stage',
'provisioner'
)
def get_queryset(self):
params = self.request.GET.dict()
filter_params = {}
for k, v in params.iteritems():
if k == 'get_provisioner_by_label':
if hasattr(self.model, v.upper()):
label_id = getattr(self.model, v.upper())
filter_params['provisioner'] = label_id
else:
return self.model.objects.none()
elif k.split('__')[0] in self.filter_fields:
filter_params[k] = v
return self.model.objects.filter(**filter_params)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical import models
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = models.Environment
fields = ('url', 'id', 'name',)
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
serializer_class = EnvironmentSerializer
queryset = models.Environment.objects.all()
<commit_msg>Add povisioner and stage on env API<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical.models import Environment
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Environment
fields = ('url', 'id', 'name', 'stage', 'provisioner')
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
model = Environment
serializer_class = EnvironmentSerializer
queryset = Environment.objects.all()
filter_fields = (
'id',
'name',
'stage',
'provisioner'
)
def get_queryset(self):
params = self.request.GET.dict()
filter_params = {}
for k, v in params.iteritems():
if k == 'get_provisioner_by_label':
if hasattr(self.model, v.upper()):
label_id = getattr(self.model, v.upper())
filter_params['provisioner'] = label_id
else:
return self.model.objects.none()
elif k.split('__')[0] in self.filter_fields:
filter_params[k] = v
return self.model.objects.filter(**filter_params)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical import models
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = models.Environment
fields = ('url', 'id', 'name',)
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
serializer_class = EnvironmentSerializer
queryset = models.Environment.objects.all()
Add povisioner and stage on env API# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical.models import Environment
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Environment
fields = ('url', 'id', 'name', 'stage', 'provisioner')
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
model = Environment
serializer_class = EnvironmentSerializer
queryset = Environment.objects.all()
filter_fields = (
'id',
'name',
'stage',
'provisioner'
)
def get_queryset(self):
params = self.request.GET.dict()
filter_params = {}
for k, v in params.iteritems():
if k == 'get_provisioner_by_label':
if hasattr(self.model, v.upper()):
label_id = getattr(self.model, v.upper())
filter_params['provisioner'] = label_id
else:
return self.model.objects.none()
elif k.split('__')[0] in self.filter_fields:
filter_params[k] = v
return self.model.objects.filter(**filter_params)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical import models
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = models.Environment
fields = ('url', 'id', 'name',)
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
serializer_class = EnvironmentSerializer
queryset = models.Environment.objects.all()
<commit_msg>Add povisioner and stage on env API<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from physical.models import Environment
class EnvironmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Environment
fields = ('url', 'id', 'name', 'stage', 'provisioner')
class EnvironmentAPI(viewsets.ReadOnlyModelViewSet):
"""
Environment API
"""
model = Environment
serializer_class = EnvironmentSerializer
queryset = Environment.objects.all()
filter_fields = (
'id',
'name',
'stage',
'provisioner'
)
def get_queryset(self):
params = self.request.GET.dict()
filter_params = {}
for k, v in params.iteritems():
if k == 'get_provisioner_by_label':
if hasattr(self.model, v.upper()):
label_id = getattr(self.model, v.upper())
filter_params['provisioner'] = label_id
else:
return self.model.objects.none()
elif k.split('__')[0] in self.filter_fields:
filter_params[k] = v
return self.model.objects.filter(**filter_params)
|
c897942c8b1c3d9283ea6453bcc6616ca3d5108e | builds/python3.6_ci/src/lint_turtle_files.py | builds/python3.6_ci/src/lint_turtle_files.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
print("Hello, I am turtle linter")
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
import logging
import os
import daiquiri
import rdflib
daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger(__name__)
# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []
def parse_turtle(path):
"""
Try to parse the Turtle at a given path. Raises a ValueError if it fails!
"""
logger.info("Parsing Turtle at path %s", path)
graph = rdflib.Graph()
try:
graph.parse(path, format='ttl')
except Exception as exc:
# Get the name of the exception class
# e.g. rdflib.plugins.parsers.notation3.BadSyntax
exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
# Then try to log something useful
logger.error("Error parsing Turtle (%s)", exc_name)
logger.error(exc)
failures.append(path)
else:
logger.info("Successfully parsed Turtle!")
if __name__ == '__main__':
for root, _, filenames in os.walk('.'):
for f in filenames:
if not f.endswith('.ttl'):
continue
path = os.path.join(root, f)
if 'WIP' in path:
logger.info("Skipping path %s as WIP", path)
continue
parse_turtle(path)
| Write a proper Turtle linter | Write a proper Turtle linter
| Python | mit | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
print("Hello, I am turtle linter")
Write a proper Turtle linter | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
import logging
import os
import daiquiri
import rdflib
daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger(__name__)
# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []
def parse_turtle(path):
"""
Try to parse the Turtle at a given path. Raises a ValueError if it fails!
"""
logger.info("Parsing Turtle at path %s", path)
graph = rdflib.Graph()
try:
graph.parse(path, format='ttl')
except Exception as exc:
# Get the name of the exception class
# e.g. rdflib.plugins.parsers.notation3.BadSyntax
exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
# Then try to log something useful
logger.error("Error parsing Turtle (%s)", exc_name)
logger.error(exc)
failures.append(path)
else:
logger.info("Successfully parsed Turtle!")
if __name__ == '__main__':
for root, _, filenames in os.walk('.'):
for f in filenames:
if not f.endswith('.ttl'):
continue
path = os.path.join(root, f)
if 'WIP' in path:
logger.info("Skipping path %s as WIP", path)
continue
parse_turtle(path)
| <commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
print("Hello, I am turtle linter")
<commit_msg>Write a proper Turtle linter<commit_after> | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
import logging
import os
import daiquiri
import rdflib
daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger(__name__)
# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []
def parse_turtle(path):
"""
Try to parse the Turtle at a given path. Raises a ValueError if it fails!
"""
logger.info("Parsing Turtle at path %s", path)
graph = rdflib.Graph()
try:
graph.parse(path, format='ttl')
except Exception as exc:
# Get the name of the exception class
# e.g. rdflib.plugins.parsers.notation3.BadSyntax
exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
# Then try to log something useful
logger.error("Error parsing Turtle (%s)", exc_name)
logger.error(exc)
failures.append(path)
else:
logger.info("Successfully parsed Turtle!")
if __name__ == '__main__':
for root, _, filenames in os.walk('.'):
for f in filenames:
if not f.endswith('.ttl'):
continue
path = os.path.join(root, f)
if 'WIP' in path:
logger.info("Skipping path %s as WIP", path)
continue
parse_turtle(path)
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
print("Hello, I am turtle linter")
Write a proper Turtle linter#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
import logging
import os
import daiquiri
import rdflib
daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger(__name__)
# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []
def parse_turtle(path):
"""
Try to parse the Turtle at a given path. Raises a ValueError if it fails!
"""
logger.info("Parsing Turtle at path %s", path)
graph = rdflib.Graph()
try:
graph.parse(path, format='ttl')
except Exception as exc:
# Get the name of the exception class
# e.g. rdflib.plugins.parsers.notation3.BadSyntax
exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
# Then try to log something useful
logger.error("Error parsing Turtle (%s)", exc_name)
logger.error(exc)
failures.append(path)
else:
logger.info("Successfully parsed Turtle!")
if __name__ == '__main__':
for root, _, filenames in os.walk('.'):
for f in filenames:
if not f.endswith('.ttl'):
continue
path = os.path.join(root, f)
if 'WIP' in path:
logger.info("Skipping path %s as WIP", path)
continue
parse_turtle(path)
| <commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
print("Hello, I am turtle linter")
<commit_msg>Write a proper Turtle linter<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
This script takes the path to a directory, and looks for any Turtle files
(https://www.w3.org/TeamSubmission/turtle/), then uses RDFLib to check if
they're valid TTL.
It exits with code 0 if all files are valid, 1 if not.
"""
import logging
import os
import daiquiri
import rdflib
daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger(__name__)
# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []
def parse_turtle(path):
"""
Try to parse the Turtle at a given path. Raises a ValueError if it fails!
"""
logger.info("Parsing Turtle at path %s", path)
graph = rdflib.Graph()
try:
graph.parse(path, format='ttl')
except Exception as exc:
# Get the name of the exception class
# e.g. rdflib.plugins.parsers.notation3.BadSyntax
exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
# Then try to log something useful
logger.error("Error parsing Turtle (%s)", exc_name)
logger.error(exc)
failures.append(path)
else:
logger.info("Successfully parsed Turtle!")
if __name__ == '__main__':
for root, _, filenames in os.walk('.'):
for f in filenames:
if not f.endswith('.ttl'):
continue
path = os.path.join(root, f)
if 'WIP' in path:
logger.info("Skipping path %s as WIP", path)
continue
parse_turtle(path)
|
62f028d61b9d5669bc05c2bbe5ce5f4e0d4401cc | src/vimapt/library/vimapt/RemoteRepo.py | src/vimapt/library/vimapt/RemoteRepo.py | #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
package_name, version_and_ext = file_name.split('_', 1)
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
| #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
pkg_name_segments = file_name.split("_")
package_name = '_'.join(pkg_name_segments[:-1])
version_and_ext = pkg_name_segments[-1]
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
| Fix bug when parse version info | Fix bug when parse version info
| Python | mit | howl-anderson/vimapt,howl-anderson/vimapt | #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
package_name, version_and_ext = file_name.split('_', 1)
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
Fix bug when parse version info | #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
pkg_name_segments = file_name.split("_")
package_name = '_'.join(pkg_name_segments[:-1])
version_and_ext = pkg_name_segments[-1]
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
| <commit_before>#!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
package_name, version_and_ext = file_name.split('_', 1)
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
<commit_msg>Fix bug when parse version info<commit_after> | #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
pkg_name_segments = file_name.split("_")
package_name = '_'.join(pkg_name_segments[:-1])
version_and_ext = pkg_name_segments[-1]
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
| #!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
package_name, version_and_ext = file_name.split('_', 1)
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
Fix bug when parse version info#!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
pkg_name_segments = file_name.split("_")
package_name = '_'.join(pkg_name_segments[:-1])
version_and_ext = pkg_name_segments[-1]
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
| <commit_before>#!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
package_name, version_and_ext = file_name.split('_', 1)
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
<commit_msg>Fix bug when parse version info<commit_after>#!/usr/bin/env python
import os
from .data_format import dumps
class RemoteRepo(object):
def __init__(self, repo_dir):
self.repo_dir = repo_dir
# initial setup
pool_relative_dir = "pool"
package_relative_path = "index/package"
self.pool_absolute_dir = os.path.join(self.repo_dir, pool_relative_dir)
self.package_abspath = os.path.join(self.repo_dir, package_relative_path)
def make_package_index(self):
package_data = self.scan_pool()
package_stream = dumps(package_data)
fd = open(self.package_abspath, 'w')
fd.write(package_stream)
fd.close()
def scan_pool(self):
files = [f for f in os.listdir(self.pool_absolute_dir)
if os.path.isfile(os.path.join(self.pool_absolute_dir, f))]
package_data = {}
for file_name in files:
pkg_name_segments = file_name.split("_")
package_name = '_'.join(pkg_name_segments[:-1])
version_and_ext = pkg_name_segments[-1]
version = os.path.splitext(version_and_ext)[0]
path = os.path.join('pool/', file_name)
package_info = {'version': version, 'path': path}
package_data[package_name] = package_info
return package_data
|
726c4f14fd5ddd49024163182917aeb9f4af504d | src/wirecloud/core/catalogue_manager.py | src/wirecloud/core/catalogue_manager.py | # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.users.add(user)
| # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
| Make published mashups visibles to all users | Make published mashups visibles to all users
| Python | agpl-3.0 | jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud | # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.users.add(user)
Make published mashups visibles to all users | # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.users.add(user)
<commit_msg>Make published mashups visibles to all users<commit_after> | # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
| # -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.users.add(user)
Make published mashups visibles to all users# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.users.add(user)
<commit_msg>Make published mashups visibles to all users<commit_after># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
|
32cd8227260f5c2fedc50b9b817ee27df2398a82 | Server/Code/database/model.py | Server/Code/database/model.py | from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
fight_id = Column(BigInteger, ForeignKey('fight.id'))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger)
def __init__(self, user_id: int):
self.user_id = user_id
| from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger, ForeignKey(User.id))
def __init__(self, user_id: int):
self.user_id = user_id
| Update user, fight foreign key dependency | Update user, fight foreign key dependency
| Python | mit | HueyPark/Unreal-Knights,HueyPark/Unreal-Knights,HueyPark/Unreal-Knights,HueyPark/Unreal-Knights | from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
fight_id = Column(BigInteger, ForeignKey('fight.id'))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger)
def __init__(self, user_id: int):
self.user_id = user_id
Update user, fight foreign key dependency | from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger, ForeignKey(User.id))
def __init__(self, user_id: int):
self.user_id = user_id
| <commit_before>from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
fight_id = Column(BigInteger, ForeignKey('fight.id'))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger)
def __init__(self, user_id: int):
self.user_id = user_id
<commit_msg>Update user, fight foreign key dependency<commit_after> | from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger, ForeignKey(User.id))
def __init__(self, user_id: int):
self.user_id = user_id
| from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
fight_id = Column(BigInteger, ForeignKey('fight.id'))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger)
def __init__(self, user_id: int):
self.user_id = user_id
Update user, fight foreign key dependencyfrom config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger, ForeignKey(User.id))
def __init__(self, user_id: int):
self.user_id = user_id
| <commit_before>from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
fight_id = Column(BigInteger, ForeignKey('fight.id'))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger)
def __init__(self, user_id: int):
self.user_id = user_id
<commit_msg>Update user, fight foreign key dependency<commit_after>from config import PASSWORD_LENGTH
from sqlalchemy import BigInteger, Column, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(BigInteger, primary_key=True, autoincrement=True)
password = Column(String(PASSWORD_LENGTH))
def __init__(self, password: str):
self.password = password
class Fight(Base):
__tablename__ = 'fight'
id = Column(BigInteger, primary_key=True, autoincrement=True)
user_id = Column(BigInteger, ForeignKey(User.id))
def __init__(self, user_id: int):
self.user_id = user_id
|
6854f889e38f565acb80c52a74df09730e0f7e45 | uitools/notifications/linux.py | uitools/notifications/linux.py |
from gi.repository import GLib, Notify as LibNotify
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'folder-new'
)
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
|
from gi.repository import GLib, Notify as LibNotify
DEV = False
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
LibNotify.init('com.westernx.uitools.notifications')
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'dialog-information'
)
if DEV:
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
| Clean up for production Linux use | Clean up for production Linux use
| Python | bsd-3-clause | westernx/uitools |
from gi.repository import GLib, Notify as LibNotify
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'folder-new'
)
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
Clean up for production Linux use |
from gi.repository import GLib, Notify as LibNotify
DEV = False
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
LibNotify.init('com.westernx.uitools.notifications')
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'dialog-information'
)
if DEV:
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
| <commit_before>
from gi.repository import GLib, Notify as LibNotify
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'folder-new'
)
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
<commit_msg>Clean up for production Linux use<commit_after> |
from gi.repository import GLib, Notify as LibNotify
DEV = False
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
LibNotify.init('com.westernx.uitools.notifications')
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'dialog-information'
)
if DEV:
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
|
from gi.repository import GLib, Notify as LibNotify
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'folder-new'
)
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
Clean up for production Linux use
from gi.repository import GLib, Notify as LibNotify
DEV = False
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
LibNotify.init('com.westernx.uitools.notifications')
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'dialog-information'
)
if DEV:
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
| <commit_before>
from gi.repository import GLib, Notify as LibNotify
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'folder-new'
)
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
<commit_msg>Clean up for production Linux use<commit_after>
from gi.repository import GLib, Notify as LibNotify
DEV = False
class Notification(object):
def __init__(self, title, message, subtitle=None, sticky=False):
self.title = title
self.subtitle = subtitle
self.message = message
self.sticky = sticky
self._sent = False
def send(self):
# see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
# see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html
LibNotify.init('com.westernx.uitools.notifications')
self._notification = notification = LibNotify.Notification.new(
self.title,
self.message,
'dialog-information'
)
if DEV:
# Can check LibNotify.get_server_caps() for a list of capabilities.
print 'capabilities', LibNotify.get_server_caps()
# If this is "default", then it is the default action for clicking the notification.
notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
notification.connect('closed', _on_action)
notification.set_timeout(5000) # 5s
notification.show()
# NOTE: This object NEEDS to be held onto for the callback to work.
|
3f9d79d50890f8605c7dfbaf5147b6f099b063b4 | Machine/admin.py | Machine/admin.py | from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
| from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac1','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
| Change the third item of the search_field from 'mac' to 'mac1' | Change the third item of the search_field from 'mac' to 'mac1'
| Python | apache-2.0 | abztrakt/labtracker,abztrakt/labtracker,abztrakt/labtracker | from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
Change the third item of the search_field from 'mac' to 'mac1' | from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac1','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
| <commit_before>from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
<commit_msg>Change the third item of the search_field from 'mac' to 'mac1' <commit_after> | from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac1','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
| from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
Change the third item of the search_field from 'mac' to 'mac1' from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac1','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
| <commit_before>from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
<commit_msg>Change the third item of the search_field from 'mac' to 'mac1' <commit_after>from django.contrib import admin
from django.db import models
from Machine import models as mmod
class ContactInLine(admin.TabularInline):
model = mmod.Contact
max_num = 2
class StatusAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
fieldsets = (
(None, {'fields': ('name', 'description')}),
)
class ItemAdmin(admin.ModelAdmin):
list_display = ('name', 'type','location','ip','mac1','mac2',
'wall_port','date_added','manu_tag','uw_tag')
search_fields = ['name','ip','mac1','wall_port']
list_filter = ['type','date_added']
class GroupAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('name', 'is_lab', 'casting_server', 'gateway',
'items', 'description')}),
)
list_display = ('name','is_lab','casting_server','gateway')
inlines = [
ContactInLine,
]
admin.site.register(mmod.Item, ItemAdmin)
admin.site.register(mmod.Group, GroupAdmin)
admin.site.register(mmod.Platform)
admin.site.register(mmod.Type)
admin.site.register(mmod.Location)
admin.site.register(mmod.Status)
# history here for development, remove for production
admin.site.register(mmod.History)
|
513aa1bd78d4f2d592ad5f34835331e315a5af77 | dyngraph/__init__.py | dyngraph/__init__.py | __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils']
| __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils', 'ui']
| Add ui module to path | Add ui module to path
| Python | isc | jaj42/dyngraph,jaj42/GraPhysio,jaj42/GraPhysio | __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils']
Add ui module to path | __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils', 'ui']
| <commit_before>__all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils']
<commit_msg>Add ui module to path<commit_after> | __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils', 'ui']
| __all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils']
Add ui module to path__all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils', 'ui']
| <commit_before>__all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils']
<commit_msg>Add ui module to path<commit_after>__all__ = ['algorithms', 'dialogs', 'exporter', 'legend', 'mainui', 'puplot', 'tsplot', 'utils', 'ui']
|
76df79075d0fdfb310a99f0805ccae253d439ee2 | game/player.py | game/player.py | #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
if board.game_over() and board.winner_color() == self._color:
return True
return False
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
| #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
return board.game_over() and board.winner_color() == self._color
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
| Replace the particular boolean returns with a boolean expression on is_winner | Replace the particular boolean returns with a boolean expression on is_winner
| Python | apache-2.0 | apojomovsky/cuatro_en_linea,apojomovsky/cuatro_en_linea | #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
if board.game_over() and board.winner_color() == self._color:
return True
return False
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
Replace the particular boolean returns with a boolean expression on is_winner | #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
return board.game_over() and board.winner_color() == self._color
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
| <commit_before>#!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
if board.game_over() and board.winner_color() == self._color:
return True
return False
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
<commit_msg>Replace the particular boolean returns with a boolean expression on is_winner<commit_after> | #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
return board.game_over() and board.winner_color() == self._color
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
| #!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
if board.game_over() and board.winner_color() == self._color:
return True
return False
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
Replace the particular boolean returns with a boolean expression on is_winner#!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
return board.game_over() and board.winner_color() == self._color
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
| <commit_before>#!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
if board.game_over() and board.winner_color() == self._color:
return True
return False
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
<commit_msg>Replace the particular boolean returns with a boolean expression on is_winner<commit_after>#!/usr/bin/env python
from abc import ABCMeta, abstractmethod
from game.gameboard import GameBoard
class Player(object):
__metaclass__ = ABCMeta
def __init__(self, color):
self._color = color.lower()
def color(self):
return self._color
def is_winner(self, board):
return board.game_over() and board.winner_color() == self._color
@abstractmethod
def play(self):
"""This method must be overridden"""
pass
class PlayerWithStrategyOne(Player):
def play(self, board):
board.put_chip(board.retrieve_first_non_full_column(), self._color)
class PlayerWithStrategyTwo(Player):
def play(self, board):
board.put_chip(board.retrieve_emptiest_column(), self._color)
|
425152f3c65b6c58065cde9ccbcebd360289ec8c | files_and_folders.py | files_and_folders.py | import os
def files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
| import os
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker
def files_and_folders(dir_path='.'):
'''Return a dict containing a sorted tuple of files and a sorted
tuple of folders'''
f_and_f = os.listdir(dir_path)
folders = [f for f in f_and_f if os.path.isdir(os.path.abspath(f))]
files = set(f_and_f) - set(folders)
return {'files': tuple(sorted(files)), 'folders': tuple(sorted(folders))}
def old_files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
print(files_and_folders())
| Cut number of lines in half | Cut number of lines in half
@The-Penultimate-Defenestrator use of sets is a nice optimization!
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker | Python | apache-2.0 | cclauss/Ten-lines-or-less | import os
def files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
Cut number of lines in half
@The-Penultimate-Defenestrator use of sets is a nice optimization!
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker | import os
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker
def files_and_folders(dir_path='.'):
'''Return a dict containing a sorted tuple of files and a sorted
tuple of folders'''
f_and_f = os.listdir(dir_path)
folders = [f for f in f_and_f if os.path.isdir(os.path.abspath(f))]
files = set(f_and_f) - set(folders)
return {'files': tuple(sorted(files)), 'folders': tuple(sorted(folders))}
def old_files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
print(files_and_folders())
| <commit_before>import os
def files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
<commit_msg>Cut number of lines in half
@The-Penultimate-Defenestrator use of sets is a nice optimization!
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker<commit_after> | import os
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker
def files_and_folders(dir_path='.'):
'''Return a dict containing a sorted tuple of files and a sorted
tuple of folders'''
f_and_f = os.listdir(dir_path)
folders = [f for f in f_and_f if os.path.isdir(os.path.abspath(f))]
files = set(f_and_f) - set(folders)
return {'files': tuple(sorted(files)), 'folders': tuple(sorted(folders))}
def old_files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
print(files_and_folders())
| import os
def files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
Cut number of lines in half
@The-Penultimate-Defenestrator use of sets is a nice optimization!
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-pickerimport os
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker
def files_and_folders(dir_path='.'):
'''Return a dict containing a sorted tuple of files and a sorted
tuple of folders'''
f_and_f = os.listdir(dir_path)
folders = [f for f in f_and_f if os.path.isdir(os.path.abspath(f))]
files = set(f_and_f) - set(folders)
return {'files': tuple(sorted(files)), 'folders': tuple(sorted(folders))}
def old_files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
print(files_and_folders())
| <commit_before>import os
def files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
<commit_msg>Cut number of lines in half
@The-Penultimate-Defenestrator use of sets is a nice optimization!
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker<commit_after>import os
# improvement liberally borrowed from:
# https://forum.omz-software.com/topic/2784/feature-request-pythonista-built-in-file-picker
def files_and_folders(dir_path='.'):
'''Return a dict containing a sorted tuple of files and a sorted
tuple of folders'''
f_and_f = os.listdir(dir_path)
folders = [f for f in f_and_f if os.path.isdir(os.path.abspath(f))]
files = set(f_and_f) - set(folders)
return {'files': tuple(sorted(files)), 'folders': tuple(sorted(folders))}
def old_files_and_folders(dir_path='.'):
files = []
folders = []
for filename in sorted(os.listdir(dir_path)):
if os.path.isdir(os.path.join(dir_path, filename)):
folders.append(filename)
else:
files.append(filename)
return tuple(files), tuple(folders)
print(files_and_folders())
|
a1fdc8e14377d4fe619550e12ea359e5e9c60f0e | dear_astrid/test/helpers.py | dear_astrid/test/helpers.py | import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = False
try:
PY3 = (sys.version_info.major == 3)
except:
pass
if PY3:
def u(string):
return string
else:
exec("def u(string):\n return string + u''\n")
| import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = sys.version_info >= (3,)
def u(string):
if not PY3:
string = string.decode('utf-8')
return string
| Simplify py 2/3 unicode string helper | Simplify py 2/3 unicode string helper
| Python | mit | rwstauner/dear_astrid,rwstauner/dear_astrid | import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = False
try:
PY3 = (sys.version_info.major == 3)
except:
pass
if PY3:
def u(string):
return string
else:
exec("def u(string):\n return string + u''\n")
Simplify py 2/3 unicode string helper | import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = sys.version_info >= (3,)
def u(string):
if not PY3:
string = string.decode('utf-8')
return string
| <commit_before>import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = False
try:
PY3 = (sys.version_info.major == 3)
except:
pass
if PY3:
def u(string):
return string
else:
exec("def u(string):\n return string + u''\n")
<commit_msg>Simplify py 2/3 unicode string helper<commit_after> | import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = sys.version_info >= (3,)
def u(string):
if not PY3:
string = string.decode('utf-8')
return string
| import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = False
try:
PY3 = (sys.version_info.major == 3)
except:
pass
if PY3:
def u(string):
return string
else:
exec("def u(string):\n return string + u''\n")
Simplify py 2/3 unicode string helperimport datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = sys.version_info >= (3,)
def u(string):
if not PY3:
string = string.decode('utf-8')
return string
| <commit_before>import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = False
try:
PY3 = (sys.version_info.major == 3)
except:
pass
if PY3:
def u(string):
return string
else:
exec("def u(string):\n return string + u''\n")
<commit_msg>Simplify py 2/3 unicode string helper<commit_after>import datetime
import os
import sys
import time
from dear_astrid.constants import *
from dear_astrid.constants import __all__ as _constants_all
from dear_astrid.tzinfo import *
from dear_astrid.tzinfo import __all__ as _tzinfo_all
__all__ = [
'dtu',
'u',
'timezone',
] + _constants_all + _tzinfo_all
def dtu(*args):
args = list(args)
while len(args) < 7:
args.append(0)
return datetime.datetime(*(args + [UTC()]))
class timezone(object):
def __init__(self, tz=None):
self.tz = tz
self.orig = None
def set_env(self, tz):
if tz is None:
if 'TZ' in os.environ:
del os.environ['TZ']
else:
os.environ['TZ'] = tz
time.tzset()
def __enter__(self):
self.orig = os.environ.get('TZ', None)
self.set_env(self.tz)
def __exit__(self, *args):
self.set_env(self.orig)
PY3 = sys.version_info >= (3,)
def u(string):
if not PY3:
string = string.decode('utf-8')
return string
|
9c0f06228254a41bd68062feafaf8c8dbaddd06b | marshmallow/base.py | marshmallow/base.py | # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
| # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
import copy
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __deepcopy__(self, memo):
ret = copy.copy(self)
return ret
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
| Speed up deep copy of fields | Speed up deep copy of fields
| Python | mit | dwieeb/marshmallow,VladimirPal/marshmallow,bartaelterman/marshmallow,mwstobo/marshmallow,maximkulkin/marshmallow,Tim-Erwin/marshmallow,jmcarp/marshmallow,0xDCA/marshmallow,daniloakamine/marshmallow,marshmallow-code/marshmallow,xLegoz/marshmallow,Bachmann1234/marshmallow,jmcarp/marshmallow,0xDCA/marshmallow,etataurov/marshmallow,quxiaolong1504/marshmallow | # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
Speed up deep copy of fields | # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
import copy
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __deepcopy__(self, memo):
ret = copy.copy(self)
return ret
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
| <commit_before># -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
<commit_msg>Speed up deep copy of fields<commit_after> | # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
import copy
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __deepcopy__(self, memo):
ret = copy.copy(self)
return ret
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
| # -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
Speed up deep copy of fields# -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
import copy
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __deepcopy__(self, memo):
ret = copy.copy(self)
return ret
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
| <commit_before># -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
<commit_msg>Speed up deep copy of fields<commit_after># -*- coding: utf-8 -*-
'''Abstract base classes.
These are necessary to avoid circular imports between core.py and fields.py.
'''
import copy
class FieldABC(object):
'''Abstract base class from which all Field classes inherit.
'''
parent = None
name = None
def format(self, value):
raise NotImplementedError
def output(self, key, obj):
raise NotImplementedError
def __deepcopy__(self, memo):
ret = copy.copy(self)
return ret
def __repr__(self):
return "<{0} Field>".format(self.__class__.__name__)
__str__ = __repr__
class SerializerABC(object):
'''Abstract base class from which all Serializers inherit.'''
@property
def errors(self):
raise NotImplementedError
def is_valid(self, fields=None):
raise NotImplementedError
|
50c734268a1380379d8d326a0860b2a9f2fade23 | restpose/__init__.py | restpose/__init__.py | # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
| # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .errors import RestPoseError, CheckPointExpiredError
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
| Add RestPoseError and CheckPointExpiredError to top-level module symbols | Add RestPoseError and CheckPointExpiredError to top-level module symbols
| Python | mit | restpose/restpose-py,restpose/restpose-py | # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
Add RestPoseError and CheckPointExpiredError to top-level module symbols | # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .errors import RestPoseError, CheckPointExpiredError
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
| <commit_before># -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
<commit_msg>Add RestPoseError and CheckPointExpiredError to top-level module symbols<commit_after> | # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .errors import RestPoseError, CheckPointExpiredError
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
| # -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
Add RestPoseError and CheckPointExpiredError to top-level module symbols# -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .errors import RestPoseError, CheckPointExpiredError
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
| <commit_before># -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
<commit_msg>Add RestPoseError and CheckPointExpiredError to top-level module symbols<commit_after># -*- coding: utf-8 -
#
# This file is part of the restpose python module, released under the MIT
# license. See the COPYING file for more information.
"""Python client for the RestPose search server.
"""
from .client import Server
from .errors import RestPoseError, CheckPointExpiredError
from .version import dev_release, version_info, __version__
from restkit import ResourceNotFound, Unauthorized, RequestFailed, \
RedirectLimit, RequestError, InvalidUrl, \
ResponseError, ProxyError, ResourceError
|
f41ff2e0a2eee56dd090e729b4470923958444b9 | conditional/util/flask.py | conditional/util/flask.py | from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval=is_eval,
is_financial=is_financial,
is_intromember=is_intromember,
**kwargs)
| from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval_director=is_eval,
is_financial_director=is_financial,
is_intromember=is_intromember,
**kwargs)
| Fix template var exports for directors | Fix template var exports for directors
| Python | mit | ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,RamZallan/conditional,ComputerScienceHouse/conditional | from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval=is_eval,
is_financial=is_financial,
is_intromember=is_intromember,
**kwargs)
Fix template var exports for directors | from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval_director=is_eval,
is_financial_director=is_financial,
is_intromember=is_intromember,
**kwargs)
| <commit_before>from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval=is_eval,
is_financial=is_financial,
is_intromember=is_intromember,
**kwargs)
<commit_msg>Fix template var exports for directors<commit_after> | from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval_director=is_eval,
is_financial_director=is_financial,
is_intromember=is_intromember,
**kwargs)
| from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval=is_eval,
is_financial=is_financial,
is_intromember=is_intromember,
**kwargs)
Fix template var exports for directorsfrom flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval_director=is_eval,
is_financial_director=is_financial,
is_intromember=is_intromember,
**kwargs)
| <commit_before>from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval=is_eval,
is_financial=is_financial,
is_intromember=is_intromember,
**kwargs)
<commit_msg>Fix template var exports for directors<commit_after>from flask import render_template as flask_render_template
from db.models import EvalSettings
from util.ldap import ldap_is_active
from util.ldap import ldap_is_alumni
from util.ldap import ldap_is_eboard
from util.ldap import ldap_is_financial_director
from util.ldap import ldap_is_eval_director
from util.ldap import ldap_is_intromember
def render_template(request, template_name, **kwargs):
user_name = request.headers.get('x-webauth-user')
#TODO maybe use the webauth request decorator
lockdown = EvalSettings.query.first().site_lockdown
is_active = ldap_is_active(user_name)
is_alumni = ldap_is_alumni(user_name)
is_eboard = ldap_is_eboard(user_name)
is_financial = ldap_is_financial_director(user_name)
is_eval = ldap_is_eval_director(user_name)
is_intromember = ldap_is_intromember(user_name)
if is_eval:
lockdown = False
# TODO FIXME AUTH BREACH
if user_name == 'loothelion':
is_eboard = True
return flask_render_template(
template_name,
lockdown=lockdown,
is_active=is_active,
is_alumni=is_alumni,
is_eboard=is_eboard,
is_eval_director=is_eval,
is_financial_director=is_financial,
is_intromember=is_intromember,
**kwargs)
|
746be2e5557f6626e984a679f1699c6a76fa932e | miner/block_test.py | miner/block_test.py | import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = 0x123123
tree = TestBlock.MerkleTreeMock()
time = 0x432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
| import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = bytes([123] * 32)
tree = TestBlock.MerkleTreeMock()
time = 432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
| Change block init test to pass bytes for hash | Change block init test to pass bytes for hash
| Python | mit | DrPandemic/pickaxe,DrPandemic/pickaxe,DrPandemic/pickaxe,DrPandemic/pickaxe | import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = 0x123123
tree = TestBlock.MerkleTreeMock()
time = 0x432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
Change block init test to pass bytes for hash | import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = bytes([123] * 32)
tree = TestBlock.MerkleTreeMock()
time = 432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
| <commit_before>import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = 0x123123
tree = TestBlock.MerkleTreeMock()
time = 0x432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
<commit_msg>Change block init test to pass bytes for hash<commit_after> | import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = bytes([123] * 32)
tree = TestBlock.MerkleTreeMock()
time = 432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
| import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = 0x123123
tree = TestBlock.MerkleTreeMock()
time = 0x432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
Change block init test to pass bytes for hashimport unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = bytes([123] * 32)
tree = TestBlock.MerkleTreeMock()
time = 432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
| <commit_before>import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = 0x123123
tree = TestBlock.MerkleTreeMock()
time = 0x432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
<commit_msg>Change block init test to pass bytes for hash<commit_after>import unittest
from block import Block
class TestBlock(unittest.TestCase):
class MerkleTreeMock:
pass
def test_init(self):
prev = bytes([123] * 32)
tree = TestBlock.MerkleTreeMock()
time = 432432
bits = 0x1a44b9f2
b = Block(prev, tree, time, bits)
self.assertEqual(b.previous_block_hash, prev)
self.assertEqual(b.version, Block.VERSION)
self.assertEqual(b.merkle_tree, tree)
self.assertEqual(b.time, time)
self.assertEqual(b.difficulty, bits)
|
65695bd7c4c7fcf3449358c0946e4584bb30a8ec | climate_data/migrations/0024_auto_20170623_0308.py | climate_data/migrations/0024_auto_20170623_0308.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 5000
count = Reading.objects.all().count()
while offset < count:
for reading in Reading.objects.all()[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
offset += pagesize
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
import sys
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 25000
count = Reading.objects.filter(station_sensor_link=None).count()
sys.stdout.write("\n")
sys.stdout.write("\r{}/{}".format(offset, count))
sys.stdout.flush()
while offset < count:
for reading in Reading.objects.filter(station_sensor_link=None)[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
sys.stdout.write("\r{}/{} ({}%)".format(offset, count, (offset / count) * 100))
sys.stdout.flush()
offset += pagesize
sys.stdout.write("\n")
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
| Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration. | Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration.
| Python | apache-2.0 | qubs/data-centre,qubs/climate-data-api,qubs/data-centre,qubs/climate-data-api | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 5000
count = Reading.objects.all().count()
while offset < count:
for reading in Reading.objects.all()[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
offset += pagesize
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration. | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
import sys
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 25000
count = Reading.objects.filter(station_sensor_link=None).count()
sys.stdout.write("\n")
sys.stdout.write("\r{}/{}".format(offset, count))
sys.stdout.flush()
while offset < count:
for reading in Reading.objects.filter(station_sensor_link=None)[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
sys.stdout.write("\r{}/{} ({}%)".format(offset, count, (offset / count) * 100))
sys.stdout.flush()
offset += pagesize
sys.stdout.write("\n")
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 5000
count = Reading.objects.all().count()
while offset < count:
for reading in Reading.objects.all()[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
offset += pagesize
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
<commit_msg>Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration.<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
import sys
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 25000
count = Reading.objects.filter(station_sensor_link=None).count()
sys.stdout.write("\n")
sys.stdout.write("\r{}/{}".format(offset, count))
sys.stdout.flush()
while offset < count:
for reading in Reading.objects.filter(station_sensor_link=None)[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
sys.stdout.write("\r{}/{} ({}%)".format(offset, count, (offset / count) * 100))
sys.stdout.flush()
offset += pagesize
sys.stdout.write("\n")
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 5000
count = Reading.objects.all().count()
while offset < count:
for reading in Reading.objects.all()[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
offset += pagesize
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration.# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
import sys
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 25000
count = Reading.objects.filter(station_sensor_link=None).count()
sys.stdout.write("\n")
sys.stdout.write("\r{}/{}".format(offset, count))
sys.stdout.flush()
while offset < count:
for reading in Reading.objects.filter(station_sensor_link=None)[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
sys.stdout.write("\r{}/{} ({}%)".format(offset, count, (offset / count) * 100))
sys.stdout.flush()
offset += pagesize
sys.stdout.write("\n")
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 5000
count = Reading.objects.all().count()
while offset < count:
for reading in Reading.objects.all()[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
offset += pagesize
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
<commit_msg>Increase page size again in station-sensor link / reading migration, and add a percent indicator. Exclude any readings that have previously been fixed up by the migration.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-23 03:08
from __future__ import unicode_literals
from django.db import migrations
import sys
# noinspection PyUnusedLocal
def add_station_sensor_link_to_reading(apps, schema_editor):
# noinspection PyPep8Naming
Reading = apps.get_model('climate_data', 'Reading')
# noinspection PyPep8Naming
StationSensorLink = apps.get_model('climate_data', 'StationSensorLink')
offset = 0
pagesize = 25000
count = Reading.objects.filter(station_sensor_link=None).count()
sys.stdout.write("\n")
sys.stdout.write("\r{}/{}".format(offset, count))
sys.stdout.flush()
while offset < count:
for reading in Reading.objects.filter(station_sensor_link=None)[offset:offset+pagesize].iterator():
reading.station_sensor_link = StationSensorLink.objects.filter(
station=reading.station,
sensor=reading.sensor
).first()
reading.save()
sys.stdout.write("\r{}/{} ({}%)".format(offset, count, (offset / count) * 100))
sys.stdout.flush()
offset += pagesize
sys.stdout.write("\n")
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0023_reading_station_sensor_link'),
]
operations = [
migrations.RunPython(add_station_sensor_link_to_reading),
]
|
5c353a23dfc2378c97fb4888db09e4a505bf8f8f | editorconfig/__init__.py | editorconfig/__init__.py | """EditorConfig Python Core"""
from versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
| """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| Fix import style for Python3 | Fix import style for Python3
| Python | bsd-2-clause | johnfraney/editorconfig-vim,johnfraney/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,pocke/editorconfig-vim | """EditorConfig Python Core"""
from versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
Fix import style for Python3 | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| <commit_before>"""EditorConfig Python Core"""
from versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
<commit_msg>Fix import style for Python3<commit_after> | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| """EditorConfig Python Core"""
from versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
Fix import style for Python3"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| <commit_before>"""EditorConfig Python Core"""
from versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
<commit_msg>Fix import style for Python3<commit_after>"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 0, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
|
afd67433190427f0234bc27ef67cde8b2f6ae746 | gravatar/gravatar.py | gravatar/gravatar.py | """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "http://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
| """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
| Use https instead http to get secure resources content in blog over https | Use https instead http to get secure resources content in blog over https | Python | agpl-3.0 | farseerfc/pelican-plugins,MarkusH/pelican-plugins,farseerfc/pelican-plugins,danmackinlay/pelican-plugins,florianjacob/pelican-plugins,cctags/pelican-plugins,farseerfc/pelican-plugins,rlaboiss/pelican-plugins,jakevdp/pelican-plugins,rlaboiss/pelican-plugins,if1live/pelican-plugins,kdheepak89/pelican-plugins,florianjacob/pelican-plugins,MarkusH/pelican-plugins,benjaminabel/pelican-plugins,mitchins/pelican-plugins,if1live/pelican-plugins,davidmarquis/pelican-plugins,jantman/pelican-plugins,davidmarquis/pelican-plugins,benjaminabel/pelican-plugins,mitchins/pelican-plugins,jakevdp/pelican-plugins,if1live/pelican-plugins,pestrickland/pelican-plugins,xsteadfastx/pelican-plugins,wilsonfreitas/pelican-plugins,cctags/pelican-plugins,andreas-h/pelican-plugins,kdheepak89/pelican-plugins,rlaboiss/pelican-plugins,wilsonfreitas/pelican-plugins,jakevdp/pelican-plugins,jantman/pelican-plugins,florianjacob/pelican-plugins,wilsonfreitas/pelican-plugins,UHBiocomputation/pelican-plugins,mortada/pelican-plugins,andreas-h/pelican-plugins,mikitex70/pelican-plugins,pxquim/pelican-plugins,danmackinlay/pelican-plugins,pxquim/pelican-plugins,farseerfc/pelican-plugins,prisae/pelican-plugins,pestrickland/pelican-plugins,UHBiocomputation/pelican-plugins,talha131/pelican-plugins,ingwinlu/pelican-plugins,M157q/pelican-plugins,MarkusH/pelican-plugins,ingwinlu/pelican-plugins,talha131/pelican-plugins,MarkusH/pelican-plugins,cctags/pelican-plugins,gjreda/pelican-plugins,howthebodyworks/pelican-plugins,mikitex70/pelican-plugins,xsteadfastx/pelican-plugins,if1live/pelican-plugins,mitchins/pelican-plugins,danmackinlay/pelican-plugins,M157q/pelican-plugins,farseerfc/pelican-plugins,pestrickland/pelican-plugins,davidmarquis/pelican-plugins,kdheepak89/pelican-plugins,benjaminabel/pelican-plugins,mortada/pelican-plugins,talha131/pelican-
plugins,benjaminabel/pelican-plugins,publicus/pelican-plugins,howthebodyworks/pelican-plugins,ingwinlu/pelican-plugins,mikitex70/pelican-plugins,wilsonfreitas/pelican-plugins,publicus/pelican-plugins,jantman/pelican-plugins,prisae/pelican-plugins,gjreda/pelican-plugins,andreas-h/pelican-plugins,pxquim/pelican-plugins,publicus/pelican-plugins,UHBiocomputation/pelican-plugins,xsteadfastx/pelican-plugins,prisae/pelican-plugins,pestrickland/pelican-plugins,andreas-h/pelican-plugins,ingwinlu/pelican-plugins,gjreda/pelican-plugins,cctags/pelican-plugins,howthebodyworks/pelican-plugins,davidmarquis/pelican-plugins,mitchins/pelican-plugins,talha131/pelican-plugins,jantman/pelican-plugins,M157q/pelican-plugins,mikitex70/pelican-plugins,danmackinlay/pelican-plugins,mortada/pelican-plugins,kdheepak89/pelican-plugins,pxquim/pelican-plugins,UHBiocomputation/pelican-plugins,M157q/pelican-plugins,mortada/pelican-plugins,rlaboiss/pelican-plugins,jakevdp/pelican-plugins,MarkusH/pelican-plugins,howthebodyworks/pelican-plugins,talha131/pelican-plugins,florianjacob/pelican-plugins,xsteadfastx/pelican-plugins,prisae/pelican-plugins,gjreda/pelican-plugins,publicus/pelican-plugins,mortada/pelican-plugins | """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "http://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
Use https instead http to get secure resources content in blog over https | """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
| <commit_before>"""
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "http://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
<commit_msg>Use https instead http to get secure resources content in blog over https<commit_after> | """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
| """
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "http://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
Use https instead http to get secure resources content in blog over https"""
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
| <commit_before>"""
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "http://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
<commit_msg>Use https instead http to get secure resources content in blog over https<commit_after>"""
Gravatar plugin for Pelican
===========================
This plugin assigns the ``author_gravatar`` variable to the Gravatar URL and
makes the variable available within the article's context.
"""
import hashlib
import six
from pelican import signals
def add_gravatar(generator, metadata):
#first check email
if 'email' not in metadata.keys()\
and 'AUTHOR_EMAIL' in generator.settings.keys():
metadata['email'] = generator.settings['AUTHOR_EMAIL']
#then add gravatar url
if 'email' in metadata.keys():
email_bytes = six.b(metadata['email']).lower()
gravatar_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(email_bytes).hexdigest()
metadata["author_gravatar"] = gravatar_url
def register():
signals.article_generator_context.connect(add_gravatar)
|
a89983d2e218c6438587be9e6ea89e76f4c5a8c6 | 14B-088/Combined_HI/HI_single_channel_clean.py | 14B-088/Combined_HI/HI_single_channel_clean.py |
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
|
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80, 160],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
| Add larger scale to match C config only map | Add larger scale to match C config only map
| Python | mit | e-koch/VLA_Lband,e-koch/VLA_Lband |
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
Add larger scale to match C config only map |
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80, 160],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
| <commit_before>
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
<commit_msg>Add larger scale to match C config only map<commit_after> |
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80, 160],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
|
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
Add larger scale to match C config only map
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80, 160],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
| <commit_before>
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
<commit_msg>Add larger scale to match C config only map<commit_after>
import sys
from casa_tools import myclean
'''
Cleans an MS with a single channel given a mask and a model
'''
vis_1 = sys.argv[-5]
vis_2 = sys.argv[-4]
model = sys.argv[-3]
mask = sys.argv[-2]
out_root = sys.argv[-1]
if model == "None":
model = None
else:
model = [model] * 2
if mask == "None":
mask = None
else:
mask = [mask] * 2
myclean(vis=[vis_1, vis_2], imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80, 160],
threshold='2mJy/beam', imagermode='mosaic', gain=0.1,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=200000,
pbcor=True, minpb=0.2, interpolation='linear', usescratch=False,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
modelimage=model, mask=mask)
|
f6b7a4ec8aa72acfd93e7f85199b251e91ca4465 | cherrypy/test/test_refleaks.py | cherrypy/test/test_refleaks.py | """Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
| """Tests for refleaks."""
import itertools
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
    """Check that per-request thread-local state does not leak references."""

    @staticmethod
    def setup_server():
        class Root:
            @cherrypy.expose
            def index(self, *args, **kwargs):
                # Attach the module-level sentinel object ``data`` to the
                # request; a leaked request would keep it alive.
                cherrypy.request.thing = data
                return "Hello world!"

        cherrypy.tree.mount(Root())

    def test_threadlocal_garbage(self):
        # Counter of successful page fetches, shared across worker threads.
        success = itertools.count()

        def getpage():
            host = '%s:%s' % (self.interface(), self.PORT)
            if self.scheme == 'https':
                c = HTTPSConnection(host)
            else:
                c = HTTPConnection(host)
            try:
                c.putrequest('GET', '/')
                c.endheaders()
                response = c.getresponse()
                body = response.read()
                self.assertEqual(response.status, 200)
                self.assertEqual(body, ntob("Hello world!"))
            finally:
                c.close()
            # Only reached when the assertions above passed.
            next(success)

        ITERATIONS = 25
        ts = []
        for _ in range(ITERATIONS):
            t = threading.Thread(target=getpage)
            ts.append(t)
            t.start()
        for t in ts:
            t.join()
        # next() returns the number of prior next() calls, i.e. the number
        # of threads that fetched the page successfully.
        self.assertEqual(next(success), ITERATIONS)
| Use a simple counter rather than appending booleans to a list and counting them. | Use a simple counter rather than appending booleans to a list and counting them.
| Python | bsd-3-clause | cherrypy/cheroot,Safihre/cherrypy,cherrypy/cherrypy,cherrypy/cherrypy,Safihre/cherrypy | """Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
Use a simple counter rather than appending booleans to a list and counting them. | """Tests for refleaks."""
import itertools
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = itertools.count()
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
next(success)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(next(success), ITERATIONS)
| <commit_before>"""Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
<commit_msg>Use a simple counter rather than appending booleans to a list and counting them.<commit_after> | """Tests for refleaks."""
import itertools
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = itertools.count()
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
next(success)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(next(success), ITERATIONS)
| """Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
Use a simple counter rather than appending booleans to a list and counting them."""Tests for refleaks."""
import itertools
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = itertools.count()
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
next(success)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(next(success), ITERATIONS)
| <commit_before>"""Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
<commit_msg>Use a simple counter rather than appending booleans to a list and counting them.<commit_after>"""Tests for refleaks."""
import itertools
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
cherrypy.tree.mount(Root())
def test_threadlocal_garbage(self):
success = itertools.count()
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
next(success)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(next(success), ITERATIONS)
|
0141527409771ff9396252eedd02c6729aa3f3d9 | eegtools/data/schalk_physiobank_test.py | eegtools/data/schalk_physiobank_test.py | import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 361 events in 11 '\
'classes.'
| import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
| Fix regression test after EDF+ annotation fix in 60753f. | Fix regression test after EDF+ annotation fix in 60753f.
| Python | bsd-3-clause | breuderink/eegtools | import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 361 events in 11 '\
'classes.'
Fix regression test after EDF+ annotation fix in 60753f. | import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
| <commit_before>import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 361 events in 11 '\
'classes.'
<commit_msg>Fix regression test after EDF+ annotation fix in 60753f.<commit_after> | import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
| import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 361 events in 11 '\
'classes.'
Fix regression test after EDF+ annotation fix in 60753f.import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
| <commit_before>import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 361 events in 11 '\
'classes.'
<commit_msg>Fix regression test after EDF+ annotation fix in 60753f.<commit_after>import schalk_physiobank
def test_s1():
r = schalk_physiobank.load(1)
assert str(r) == 'Recording "schalk-physiobank-s1" (64 channels x 259520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
def test_s14():
'''Test subject 14 which contains runs *without* any event.'''
r = schalk_physiobank.load(14)
assert str(r) == 'Recording "schalk-physiobank-s14" (64 channels x 255520 '\
'samples) at 160.00 Hz in 14 continuous blocks, with 362 events in 11 '\
'classes.'
|
0c8e3f6d962a573618d5224b20b68e57790f4beb | feder/institutions/forms.py | feder/institutions/forms.py | # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address']
| # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
    """Create/update form for :class:`Institution` (name, address, jst).

    The previous ``__init__`` override only forwarded to ``super()`` and
    added nothing, so it has been removed; ``UserKwargModelFormMixin``
    still consumes the ``user`` keyword argument as before (presumably
    popping it before ModelForm sees it — verify against django-braces).
    """

    class Meta:
        model = Institution
        fields = ['name', 'address', 'jst']
| Fix InstitutionForm to include jst field | Fix InstitutionForm to include jst field
| Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address']
Fix InstitutionForm to include jst field | # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address', 'jst']
| <commit_before># -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address']
<commit_msg>Fix InstitutionForm to include jst field<commit_after> | # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address', 'jst']
| # -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address']
Fix InstitutionForm to include jst field# -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address', 'jst']
| <commit_before># -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address']
<commit_msg>Fix InstitutionForm to include jst field<commit_after># -*- coding: utf-8 -*-
from atom.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Institution
class InstitutionForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
super(InstitutionForm, self).__init__(*args, **kwargs)
class Meta:
model = Institution
fields = ['name', 'address', 'jst']
|
4ea698622a7a581b84ce61de8f5f65ce12f0c129 | morenines/output.py | morenines/output.py | import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in filelist:
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| Make print_filelist sort the list before printing | Make print_filelist sort the list before printing
| Python | mit | mcgid/morenines,mcgid/morenines | import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in filelist:
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
Make print_filelist sort the list before printing | import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| <commit_before>import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in filelist:
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
<commit_msg>Make print_filelist sort the list before printing<commit_after> | import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in filelist:
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
Make print_filelist sort the list before printingimport click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| <commit_before>import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in filelist:
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
<commit_msg>Make print_filelist sort the list before printing<commit_after>import click
def print_message(message):
print message
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
print_message("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
|
a5b3dd62e58dc23c03b7876ee99b757022413e94 | billjobs/urls.py | billjobs/urls.py | from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^user/$', views.UserAdmin.as_view(), name='user'),
url(r'^user/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token, name='api-token-auth')
]
| from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
api_patterns = [
url(r'^auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^token-auth/', obtain_auth_token, name='api-token-auth'),
url(r'^users/$', views.UserAdmin.as_view(), name='users-api'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='users-detail-api'),
]
urlpatterns = [
url(r'api/1.0/', include(api_patterns)),
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
]
| Move api auth url to api_patterns | Move api auth url to api_patterns
| Python | mit | ioO/billjobs | from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^user/$', views.UserAdmin.as_view(), name='user'),
url(r'^user/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token, name='api-token-auth')
]
Move api auth url to api_patterns | from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
api_patterns = [
url(r'^auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^token-auth/', obtain_auth_token, name='api-token-auth'),
url(r'^users/$', views.UserAdmin.as_view(), name='users-api'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='users-detail-api'),
]
urlpatterns = [
url(r'api/1.0/', include(api_patterns)),
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
]
| <commit_before>from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^user/$', views.UserAdmin.as_view(), name='user'),
url(r'^user/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token, name='api-token-auth')
]
<commit_msg>Move api auth url to api_patterns<commit_after> | from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
api_patterns = [
url(r'^auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^token-auth/', obtain_auth_token, name='api-token-auth'),
url(r'^users/$', views.UserAdmin.as_view(), name='users-api'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='users-detail-api'),
]
urlpatterns = [
url(r'api/1.0/', include(api_patterns)),
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
]
| from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^user/$', views.UserAdmin.as_view(), name='user'),
url(r'^user/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token, name='api-token-auth')
]
Move api auth url to api_patternsfrom django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
api_patterns = [
url(r'^auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^token-auth/', obtain_auth_token, name='api-token-auth'),
url(r'^users/$', views.UserAdmin.as_view(), name='users-api'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='users-detail-api'),
]
urlpatterns = [
url(r'api/1.0/', include(api_patterns)),
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
]
| <commit_before>from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^user/$', views.UserAdmin.as_view(), name='user'),
url(r'^user/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token, name='api-token-auth')
]
<commit_msg>Move api auth url to api_patterns<commit_after>from django.conf.urls import url, include
from rest_framework.authtoken.views import obtain_auth_token
from . import views
api_patterns = [
url(r'^auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^token-auth/', obtain_auth_token, name='api-token-auth'),
url(r'^users/$', views.UserAdmin.as_view(), name='users-api'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(),
name='users-detail-api'),
]
urlpatterns = [
url(r'api/1.0/', include(api_patterns)),
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
]
|
71798ca99fe7245a578ea1d6ba367e485d9ad5f8 | mvp/renderlayers.py | mvp/renderlayers.py | # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers():
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
| # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(
query=True,
currentRenderLayer=True,
)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers() + [rs.getDefaultRenderLayer()]:
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
| Fix defaultRenderLayer was not included when blasting all layers. | Fix defaultRenderLayer was not included when blasting all layers.
| Python | mit | danbradham/mvp | # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers():
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
Fix defaultRenderLayer was not included when blasting all layers. | # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(
query=True,
currentRenderLayer=True,
)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers() + [rs.getDefaultRenderLayer()]:
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
| <commit_before># -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers():
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
<commit_msg>Fix defaultRenderLayer was not included when blasting all layers.<commit_after> | # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(
query=True,
currentRenderLayer=True,
)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers() + [rs.getDefaultRenderLayer()]:
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
| # -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers():
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
Fix defaultRenderLayer was not included when blasting all layers.# -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(
query=True,
currentRenderLayer=True,
)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers() + [rs.getDefaultRenderLayer()]:
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
| <commit_before># -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers():
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
<commit_msg>Fix defaultRenderLayer was not included when blasting all layers.<commit_after># -*- coding: utf-8 -*-
from contextlib import contextmanager
import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
@contextmanager
def enabled_render_layers():
old_layer = cmds.editRenderLayerGlobals(
query=True,
currentRenderLayer=True,
)
try:
rs = renderSetup.instance()
def switchToLayer(layer):
def _switch():
rs.switchToLayer(layer)
return _switch
enabled_layers = []
for layer in rs.getRenderLayers() + [rs.getDefaultRenderLayer()]:
layer.switchToLayer = switchToLayer(layer)
if layer.isRenderable():
enabled_layers.append(layer)
yield enabled_layers
finally:
cmds.editRenderLayerGlobals(currentRenderLayer=old_layer)
|
56528264cdc76dc1b00804b7f67908d3bb1b1b0e | flask_appconfig/docker.py | flask_appconfig/docker.py | #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| Use correct database name instead of None when not supplied. | Use correct database name instead of None when not supplied.
| Python | mit | mbr/flask-appconfig | #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
Use correct database name instead of None when not supplied. | #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| <commit_before>#!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
<commit_msg>Use correct database name instead of None when not supplied.<commit_after> | #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| #!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
Use correct database name instead of None when not supplied.#!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| <commit_before>#!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
<commit_msg>Use correct database name instead of None when not supplied.<commit_after>#!/usr/bin/env python
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
|
809f2f781fb2186e3bad27cdd19115b6f425cd0b | tint/tests/test_grid_utils.py | tint/tests/test_grid_utils.py | """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 6))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
| """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 102000))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
| Update for unit tests for PR 37 | TST: Update for unit tests for PR 37
| Python | bsd-2-clause | openradar/TINT,openradar/TINT | """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 6))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
TST: Update for unit tests for PR 37 | """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 102000))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
| <commit_before>""" Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 6))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
<commit_msg>TST: Update for unit tests for PR 37<commit_after> | """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 102000))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
| """ Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 6))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
TST: Update for unit tests for PR 37""" Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 102000))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
| <commit_before>""" Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 6))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
<commit_msg>TST: Update for unit tests for PR 37<commit_after>""" Unit tests for grid_utils module. """
from datetime import datetime
import numpy as np
from tint import grid_utils
from tint.testing.sample_objects import grid, field
from tint.testing.sample_objects import params, grid_size
def test_parse_grid_datetime():
dt = grid_utils.parse_grid_datetime(grid)
assert(dt == datetime(2015, 7, 10, 18, 34, 102000))
def test_get_grid_size():
grid_size = grid_utils.get_grid_size(grid)
assert np.all(grid_size == np.array([500., 500., 500.]))
def test_extract_grid_data():
raw, filtered = grid_utils.extract_grid_data(grid, field,
grid_size, params)
assert np.max(filtered) == 11
assert np.min(filtered) == 0
|
c8a08042cc7eb0ddb0b617f1fb935691a5803a29 | gitcommitautosave.py | gitcommitautosave.py | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
| Add PULLREQ_EDITMSG for hub pull requests | Add PULLREQ_EDITMSG for hub pull requests
| Python | mit | aristidesfl/sublime-git-commit-message-auto-save | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
Add PULLREQ_EDITMSG for hub pull requests | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
| <commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
<commit_msg>Add PULLREQ_EDITMSG for hub pull requests<commit_after> | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
Add PULLREQ_EDITMSG for hub pull requests"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
| <commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
<commit_msg>Add PULLREQ_EDITMSG for hub pull requests<commit_after>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
|
07e7f5023958538933802f78c7bdd5d61f04a825 | flocker/restapi/__init__.py | flocker/restapi/__init__.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import structured
__all__ = ["structured"]
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import (
structured, EndpointResponse, userDocumentation,
)
__all__ = ["structured", "EndpointResponse", "userDocumentation"]
| Address review comment: Make more APIs public. | Address review comment: Make more APIs public.
| Python | apache-2.0 | moypray/flocker,adamtheturtle/flocker,Azulinho/flocker,agonzalezro/flocker,1d4Nf6/flocker,1d4Nf6/flocker,runcom/flocker,1d4Nf6/flocker,moypray/flocker,lukemarsden/flocker,LaynePeng/flocker,jml/flocker,moypray/flocker,adamtheturtle/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,jml/flocker,AndyHuu/flocker,achanda/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,Azulinho/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,achanda/flocker,w4ngyi/flocker,achanda/flocker,Azulinho/flocker,AndyHuu/flocker,LaynePeng/flocker,agonzalezro/flocker,mbrukman/flocker,mbrukman/flocker,agonzalezro/flocker,AndyHuu/flocker,runcom/flocker,w4ngyi/flocker,jml/flocker,hackday-profilers/flocker,hackday-profilers/flocker,LaynePeng/flocker,runcom/flocker,adamtheturtle/flocker,w4ngyi/flocker | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import structured
__all__ = ["structured"]
Address review comment: Make more APIs public. | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import (
structured, EndpointResponse, userDocumentation,
)
__all__ = ["structured", "EndpointResponse", "userDocumentation"]
| <commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import structured
__all__ = ["structured"]
<commit_msg>Address review comment: Make more APIs public.<commit_after> | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import (
structured, EndpointResponse, userDocumentation,
)
__all__ = ["structured", "EndpointResponse", "userDocumentation"]
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import structured
__all__ = ["structured"]
Address review comment: Make more APIs public.# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import (
structured, EndpointResponse, userDocumentation,
)
__all__ = ["structured", "EndpointResponse", "userDocumentation"]
| <commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import structured
__all__ = ["structured"]
<commit_msg>Address review comment: Make more APIs public.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Infrastructure for publishing a REST HTTP API.
"""
from ._infrastructure import (
structured, EndpointResponse, userDocumentation,
)
__all__ = ["structured", "EndpointResponse", "userDocumentation"]
|
b016fad5d55993b064a1c4d15fd281f439045491 | gateway/camera/device.py | gateway/camera/device.py | from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
| from tornado import gen
from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
@gen.coroutine
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
| Fix CameraDevice's send method is not called | Fix CameraDevice's send method is not called
Add send method @gen.coroutine decorator | Python | mit | walkover/auto-tracking-cctv-gateway | from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
Fix CameraDevice's send method is not called
Add send method @gen.coroutine decorator | from tornado import gen
from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
@gen.coroutine
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
| <commit_before>from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
<commit_msg>Fix CameraDevice's send method is not called
Add send method @gen.coroutine decorator<commit_after> | from tornado import gen
from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
@gen.coroutine
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
| from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
Fix CameraDevice's send method is not called
Add send method @gen.coroutine decoratorfrom tornado import gen
from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
@gen.coroutine
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
| <commit_before>from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
<commit_msg>Fix CameraDevice's send method is not called
Add send method @gen.coroutine decorator<commit_after>from tornado import gen
from gateway import net
class CameraDevice(object):
def __init__(self, stream, address):
self.resolution = None
self.framerate = None
self.__stream = stream
self.__address = address
@gen.coroutine
def send(self, opcode, body=None):
packet = net.encode_packet(opcode, body)
yield self.__stream.write(packet)
|
9f0c05eb9926dc5a9be6eb65bd71f7f1218e24e1 | grano/logic/validation.py | grano/logic/validation.py | import re
import colander
from colander import Invalid
from grano.logic.references import ProjectRef
from grano.core import db
from grano.model import Schema, Attribute
FORBIDDEN = ['project', 'source', 'target', 'id', 'created_at', 'updated_at', 'author', 'author_id']
database_forbidden = colander.Function(lambda v: v not in FORBIDDEN, message="Reserved name")
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_name = colander.All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
| import colander
from colander import Invalid
class All(object):
""" Composite validator which succeeds if none of its
subvalidators raises an :class:`colander.Invalid` exception"""
def __init__(self, *validators):
self.validators = validators
def __call__(self, node, value):
for validator in self.validators:
validator(node, value)
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_forbidden = colander.Regex('^(project|source|target|id|created_at" \
+ "|updated_at|author|author_id)$')
database_name = All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
| Fix handling of All() exceptions. | Fix handling of All() exceptions. | Python | mit | 4bic/grano,CodeForAfrica/grano,4bic-attic/grano,granoproject/grano | import re
import colander
from colander import Invalid
from grano.logic.references import ProjectRef
from grano.core import db
from grano.model import Schema, Attribute
FORBIDDEN = ['project', 'source', 'target', 'id', 'created_at', 'updated_at', 'author', 'author_id']
database_forbidden = colander.Function(lambda v: v not in FORBIDDEN, message="Reserved name")
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_name = colander.All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
Fix handling of All() exceptions. | import colander
from colander import Invalid
class All(object):
""" Composite validator which succeeds if none of its
subvalidators raises an :class:`colander.Invalid` exception"""
def __init__(self, *validators):
self.validators = validators
def __call__(self, node, value):
for validator in self.validators:
validator(node, value)
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_forbidden = colander.Regex('^(project|source|target|id|created_at" \
+ "|updated_at|author|author_id)$')
database_name = All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
| <commit_before>import re
import colander
from colander import Invalid
from grano.logic.references import ProjectRef
from grano.core import db
from grano.model import Schema, Attribute
FORBIDDEN = ['project', 'source', 'target', 'id', 'created_at', 'updated_at', 'author', 'author_id']
database_forbidden = colander.Function(lambda v: v not in FORBIDDEN, message="Reserved name")
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_name = colander.All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
<commit_msg>Fix handling of All() exceptions. <commit_after> | import colander
from colander import Invalid
class All(object):
""" Composite validator which succeeds if none of its
subvalidators raises an :class:`colander.Invalid` exception"""
def __init__(self, *validators):
self.validators = validators
def __call__(self, node, value):
for validator in self.validators:
validator(node, value)
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_forbidden = colander.Regex('^(project|source|target|id|created_at" \
+ "|updated_at|author|author_id)$')
database_name = All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
| import re
import colander
from colander import Invalid
from grano.logic.references import ProjectRef
from grano.core import db
from grano.model import Schema, Attribute
FORBIDDEN = ['project', 'source', 'target', 'id', 'created_at', 'updated_at', 'author', 'author_id']
database_forbidden = colander.Function(lambda v: v not in FORBIDDEN, message="Reserved name")
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_name = colander.All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
Fix handling of All() exceptions. import colander
from colander import Invalid
class All(object):
""" Composite validator which succeeds if none of its
subvalidators raises an :class:`colander.Invalid` exception"""
def __init__(self, *validators):
self.validators = validators
def __call__(self, node, value):
for validator in self.validators:
validator(node, value)
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_forbidden = colander.Regex('^(project|source|target|id|created_at" \
+ "|updated_at|author|author_id)$')
database_name = All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
| <commit_before>import re
import colander
from colander import Invalid
from grano.logic.references import ProjectRef
from grano.core import db
from grano.model import Schema, Attribute
FORBIDDEN = ['project', 'source', 'target', 'id', 'created_at', 'updated_at', 'author', 'author_id']
database_forbidden = colander.Function(lambda v: v not in FORBIDDEN, message="Reserved name")
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_name = colander.All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
<commit_msg>Fix handling of All() exceptions. <commit_after>import colander
from colander import Invalid
class All(object):
""" Composite validator which succeeds if none of its
subvalidators raises an :class:`colander.Invalid` exception"""
def __init__(self, *validators):
self.validators = validators
def __call__(self, node, value):
for validator in self.validators:
validator(node, value)
database_format = colander.Regex('^[a-zA-Z][a-zA-Z0-9_]+[a-zA-Z0-9]$')
database_forbidden = colander.Regex('^(project|source|target|id|created_at" \
+ "|updated_at|author|author_id)$')
database_name = All(database_format, database_forbidden)
class FixedValue(object):
def __init__(self, value):
self.value = value
def serialize(self, node, appstruct):
return colander.null
def deserialize(self, node, cstruct):
return self.value
def cstruct_children(self, node, cstruct):
return []
|
5ed5855efe09c92efbf93dab5eb0b37325072381 | opps/api/__init__.py | opps/api/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| Fix method get on ApiKeyAuthentication | Fix method get on ApiKeyAuthentication
| Python | mit | jeanmask/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
Fix method get on ApiKeyAuthentication | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
<commit_msg>Fix method get on ApiKeyAuthentication<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
Fix method get on ApiKeyAuthentication#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
<commit_msg>Fix method get on ApiKeyAuthentication<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
66c0b220188499a5871ee1fbe5b79f0a57db4ec9 | feder/tasks/filters.py | feder/tasks/filters.py | # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
| # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
| Add is_done filter for task | Add is_done filter for task
| Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
Add is_done filter for task | # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
    """Filter set for Task list views.

    Relational fields are filtered through autocomplete widgets; ``created``
    is filtered by a predefined date range and ``done`` by completion state.
    """
    case = AutocompleteChoiceFilter('CaseAutocomplete')
    questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
    case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
    case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
    created = django_filters.DateRangeFilter(label=_("Creation date"))
    # Delegates to the queryset's is_done(): v=True keeps done tasks,
    # v=False excludes them (exclude=not v).
    done = django_filters.BooleanFilter(label=_("Is done?"),
                                       action=lambda qs, v: qs.is_done(exclude=not v))
    form_class = None
    def __init__(self, *args, **kwargs):
        super(TaskFilter, self).__init__(*args, **kwargs)
        # The auto-generated 'name' filter (from Meta.fields) should match as
        # a case-insensitive substring instead of by exact equality.
        self.filters['name'].lookup_type = 'icontains'
    class Meta:
        model = Task
        fields = ['name', 'case', 'questionary', 'case__institution', ]
        order_by = ['created', ]
| <commit_before># -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
<commit_msg>Add is_done filter for task<commit_after> | # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
| # -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
Add is_done filter for task# -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
| <commit_before># -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
<commit_msg>Add is_done filter for task<commit_after># -*- coding: utf-8 -*-
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
|
98392a42381470153fe2c13cda8e24cdccc5fe4b | hr_holidays_legal_leave/models/res_config.py | hr_holidays_legal_leave/models/res_config.py | # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
    """HR settings wizard: exposes the company-level legal leave type."""
    _inherit = 'hr.config.settings'
    # Wizard field mirrored to/from the current user's company record.
    legal_holidays_status_id = fields.Many2one(
        'hr.holidays.status',
        'Legal Leave Status',
    )
    @api.model
    def get_legal_holidays_status_id(self, fields):
        # Default getter: load the value from the current user's company.
        # NOTE(review): the `fields` argument is unused and shadows the
        # imported `fields` module -- confirm whether the settings-loader
        # calling convention requires it or it can be dropped.
        company = self.env.user.company_id
        return {
            'legal_holidays_status_id': company.legal_holidays_status_id.id,
        }
    @api.multi
    def set_legal_holidays_status_id(self):
        # Persist the wizard value back onto the current user's company.
        self.ensure_one()
        company = self.env.user.company_id
        company.legal_holidays_status_id = self.legal_holidays_status_id
| # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
    """HR settings wizard: exposes the company-level legal leave type."""
    _inherit = 'hr.config.settings'
    # Wizard field mirrored to/from the current user's company record.
    legal_holidays_status_id = fields.Many2one(
        'hr.holidays.status',
        'Legal Leave Status',
    )
    @api.model
    def get_legal_holidays_status_id(self):
        # Default getter: load the value from the current user's company.
        company = self.env.user.company_id
        return {
            'legal_holidays_status_id': company.legal_holidays_status_id.id,
        }
    @api.multi
    def set_legal_holidays_status_id(self):
        # Persist the wizard value back onto the current user's company.
        self.ensure_one()
        company = self.env.user.company_id
        company.legal_holidays_status_id = self.legal_holidays_status_id
| Remove fields from method definition. | Remove fields from method definition.
| Python | agpl-3.0 | VitalPet/hr,feketemihai/hr,Eficent/hr,open-synergy/hr,VitalPet/hr,thinkopensolutions/hr,Eficent/hr,acsone/hr,open-synergy/hr,thinkopensolutions/hr,acsone/hr,feketemihai/hr | # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self, fields):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
Remove fields from method definition. | # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
| <commit_before># -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self, fields):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
<commit_msg>Remove fields from method definition.<commit_after> | # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
| # -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self, fields):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
Remove fields from method definition.# -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
| <commit_before># -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self, fields):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
<commit_msg>Remove fields from method definition.<commit_after># -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://www.@idtlabs.sl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models, api
class HumanResourcesConfiguration(models.TransientModel):
_inherit = 'hr.config.settings'
legal_holidays_status_id = fields.Many2one(
'hr.holidays.status',
'Legal Leave Status',
)
@api.model
def get_legal_holidays_status_id(self):
company = self.env.user.company_id
return {
'legal_holidays_status_id': company.legal_holidays_status_id.id,
}
@api.multi
def set_legal_holidays_status_id(self):
self.ensure_one()
company = self.env.user.company_id
company.legal_holidays_status_id = self.legal_holidays_status_id
|
c55a42737a99104734a79e946304849258bfa44b | aplib/__init__.py | aplib/__init__.py | # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from make_socket_for_ip import make_socket_for_ip
| # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# The real implementation lives in the coro-backed module; wrap the import so
# that `aplib` can still be imported when coro has not been installed.
try:
    from make_socket_for_ip import make_socket_for_ip
except ImportError:
    def make_socket_for_ip(ip, stype=None):
        """Stub used when coro is unavailable; always raises NotImplementedError."""
        raise NotImplementedError
| Allow aplib to be imported when coro is not installed. | Allow aplib to be imported when coro is not installed.
| Python | mit | ironport/aplib,ironport/aplib | # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from make_socket_for_ip import make_socket_for_ip
Allow aplib to be imported when coro is not installed. | # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Wrap this in case coro has not been installed
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
| <commit_before># Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from make_socket_for_ip import make_socket_for_ip
<commit_msg>Allow aplib to be imported when coro is not installed.<commit_after> | # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Wrap this in case coro has not been installed
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
| # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from make_socket_for_ip import make_socket_for_ip
Allow aplib to be imported when coro is not installed.# Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Wrap this in case coro has not been installed
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
| <commit_before># Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from make_socket_for_ip import make_socket_for_ip
<commit_msg>Allow aplib to be imported when coro is not installed.<commit_after># Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Wrap this in case coro has not been installed
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
|
f4701ac73f884ef28e62bb35adc81330ce512171 | goto_last_edit.py | goto_last_edit.py | import sublime_plugin
# the last edited Region, keyed to View.id
_last_edits = {}
class RecordLastEdit(sublime_plugin.EventListener):
    """Remember the selection at every modification, keyed by view id."""
    def on_modified(self, view):
        # Store the first selection region at the time of the edit in the
        # module-level _last_edits dict under this view's id.
        _last_edits[view.id()] = view.sel()[0]
class GotoLastEdit(sublime_plugin.TextCommand):
    """Move the cursor back to the most recently edited region of the view."""
    def run(self, edit):
        """Restore the selection to the last edit recorded for this view.

        Does nothing when no edit has been recorded yet.
        """
        last_edit = _last_edits.get(self.view.id(), None)
        # `is not None` rather than `!= None`: identity is the correct test
        # for the missing-value sentinel (PEP 8) and avoids Region.__eq__.
        if last_edit is not None:
            self.view.sel().clear()
            self.view.sel().add(last_edit)
            self.view.show(last_edit)
| import sublime, sublime_plugin
LAST_EDITS_SETTING = 'last_edits'
class RecordLastEdit(sublime_plugin.EventListener):
    """Persist the most recent edit position per view in the view's settings."""
    def on_modified(self, view):
        # Store the region endpoints as plain ints keyed by the stringified
        # view id -- presumably because settings values must be serializable
        # (Region objects are not) -- TODO confirm against the settings API.
        last_edits = view.settings().get(LAST_EDITS_SETTING, {})
        edit_position = view.sel()[0]
        last_edits[str(view.id())] = {'a': edit_position.a, 'b': edit_position.b}
        view.settings().set(LAST_EDITS_SETTING, last_edits)
class GotoLastEdit(sublime_plugin.TextCommand):
    """Jump the cursor to the last recorded edit; run again to jump back."""
    # The position the cursor was at before the command fired. Saved when the
    # command is run, so that if the user runs the command again before making
    # another edit in the file, the cursor returns to its original position.
    original_position = None
    def move_cursor_to_region(self, region):
        """ Clear the cursor's position and move it to `region`, saving the
        previous position in `self.original_position`. """
        cursor = self.view.sel()
        self.original_position = cursor[0]
        cursor.clear()
        cursor.add(region)
        self.view.show(region)
    def run(self, edit):
        """
        If there was a last edit recorded for the view, store the current
        position as self.original_position and move the cursor to the position
        of the last edit.
        If the cursor is currently at the same position as the last edit, and
        `self.original_position` is available, then return the cursor to
        its original position.
        """
        last_edits = self.view.settings().get(LAST_EDITS_SETTING, {})
        last_edit = last_edits.get(str(self.view.id()), None)
        current_position = self.view.sel()[0]
        if last_edit is None:
            return
        # NOTE(review): `long` pins this to Python 2 (Sublime Text 2 era);
        # it would need `int` under Python 3 -- confirm target runtime.
        last_edit_region = sublime.Region(
            long(last_edit['a']), long(last_edit['b']))
        if self.original_position is not None \
            and current_position == last_edit_region:
            self.move_cursor_to_region(self.original_position)
            return
        self.move_cursor_to_region(last_edit_region)
| Return to original if cursor is already at last edit | Return to original if cursor is already at last edit
| Python | mit | abrookins/GotoLastEdit | import sublime_plugin
# the last edited Region, keyed to View.id
_last_edits = {}
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
_last_edits[view.id()] = view.sel()[0]
class GotoLastEdit(sublime_plugin.TextCommand):
def run(self, edit):
last_edit = _last_edits.get(self.view.id(), None)
if last_edit != None:
self.view.sel().clear()
self.view.sel().add(last_edit)
self.view.show(last_edit)
Return to original if cursor is already at least edit | import sublime, sublime_plugin
LAST_EDITS_SETTING = 'last_edits'
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
last_edits = view.settings().get(LAST_EDITS_SETTING, {})
edit_position = view.sel()[0]
last_edits[str(view.id())] = {'a': edit_position.a, 'b': edit_position.b}
view.settings().set(LAST_EDITS_SETTING, last_edits)
class GotoLastEdit(sublime_plugin.TextCommand):
# The position the cursor was at before the command fired. Saved when the
# command is run, so that if the user runs the command again before making
# another edit in the file, the cursor returns to its original position.
original_position = None
def move_cursor_to_region(self, region):
""" Clear the cursor's position and move it to `region`. """
cursor = self.view.sel()
self.original_position = cursor[0]
cursor.clear()
cursor.add(region)
self.view.show(region)
def run(self, edit):
"""
If there was a last edit recorded for the view, store the current
position as self.original_position and move the cursor to the position
of the last edit.
If the cursor is currently at the same position as the last edit, and
there `self.original_position` is available, then return the cursor to
its original position.
"""
last_edits = self.view.settings().get(LAST_EDITS_SETTING, {})
last_edit = last_edits.get(str(self.view.id()), None)
current_position = self.view.sel()[0]
if last_edit is None:
return
last_edit_region = sublime.Region(
long(last_edit['a']), long(last_edit['b']))
if self.original_position is not None \
and current_position == last_edit_region:
self.move_cursor_to_region(self.original_position)
return
self.move_cursor_to_region(last_edit_region)
| <commit_before>import sublime_plugin
# the last edited Region, keyed to View.id
_last_edits = {}
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
_last_edits[view.id()] = view.sel()[0]
class GotoLastEdit(sublime_plugin.TextCommand):
def run(self, edit):
last_edit = _last_edits.get(self.view.id(), None)
if last_edit != None:
self.view.sel().clear()
self.view.sel().add(last_edit)
self.view.show(last_edit)
<commit_msg>Return to original if cursor is already at least edit<commit_after> | import sublime, sublime_plugin
LAST_EDITS_SETTING = 'last_edits'
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
last_edits = view.settings().get(LAST_EDITS_SETTING, {})
edit_position = view.sel()[0]
last_edits[str(view.id())] = {'a': edit_position.a, 'b': edit_position.b}
view.settings().set(LAST_EDITS_SETTING, last_edits)
class GotoLastEdit(sublime_plugin.TextCommand):
# The position the cursor was at before the command fired. Saved when the
# command is run, so that if the user runs the command again before making
# another edit in the file, the cursor returns to its original position.
original_position = None
def move_cursor_to_region(self, region):
""" Clear the cursor's position and move it to `region`. """
cursor = self.view.sel()
self.original_position = cursor[0]
cursor.clear()
cursor.add(region)
self.view.show(region)
def run(self, edit):
"""
If there was a last edit recorded for the view, store the current
position as self.original_position and move the cursor to the position
of the last edit.
If the cursor is currently at the same position as the last edit, and
there `self.original_position` is available, then return the cursor to
its original position.
"""
last_edits = self.view.settings().get(LAST_EDITS_SETTING, {})
last_edit = last_edits.get(str(self.view.id()), None)
current_position = self.view.sel()[0]
if last_edit is None:
return
last_edit_region = sublime.Region(
long(last_edit['a']), long(last_edit['b']))
if self.original_position is not None \
and current_position == last_edit_region:
self.move_cursor_to_region(self.original_position)
return
self.move_cursor_to_region(last_edit_region)
| import sublime_plugin
# the last edited Region, keyed to View.id
_last_edits = {}
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
_last_edits[view.id()] = view.sel()[0]
class GotoLastEdit(sublime_plugin.TextCommand):
def run(self, edit):
last_edit = _last_edits.get(self.view.id(), None)
if last_edit != None:
self.view.sel().clear()
self.view.sel().add(last_edit)
self.view.show(last_edit)
Return to original if cursor is already at least editimport sublime, sublime_plugin
LAST_EDITS_SETTING = 'last_edits'
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
last_edits = view.settings().get(LAST_EDITS_SETTING, {})
edit_position = view.sel()[0]
last_edits[str(view.id())] = {'a': edit_position.a, 'b': edit_position.b}
view.settings().set(LAST_EDITS_SETTING, last_edits)
class GotoLastEdit(sublime_plugin.TextCommand):
# The position the cursor was at before the command fired. Saved when the
# command is run, so that if the user runs the command again before making
# another edit in the file, the cursor returns to its original position.
original_position = None
def move_cursor_to_region(self, region):
""" Clear the cursor's position and move it to `region`. """
cursor = self.view.sel()
self.original_position = cursor[0]
cursor.clear()
cursor.add(region)
self.view.show(region)
def run(self, edit):
"""
If there was a last edit recorded for the view, store the current
position as self.original_position and move the cursor to the position
of the last edit.
If the cursor is currently at the same position as the last edit, and
there `self.original_position` is available, then return the cursor to
its original position.
"""
last_edits = self.view.settings().get(LAST_EDITS_SETTING, {})
last_edit = last_edits.get(str(self.view.id()), None)
current_position = self.view.sel()[0]
if last_edit is None:
return
last_edit_region = sublime.Region(
long(last_edit['a']), long(last_edit['b']))
if self.original_position is not None \
and current_position == last_edit_region:
self.move_cursor_to_region(self.original_position)
return
self.move_cursor_to_region(last_edit_region)
| <commit_before>import sublime_plugin
# the last edited Region, keyed to View.id
_last_edits = {}
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
_last_edits[view.id()] = view.sel()[0]
class GotoLastEdit(sublime_plugin.TextCommand):
def run(self, edit):
last_edit = _last_edits.get(self.view.id(), None)
if last_edit != None:
self.view.sel().clear()
self.view.sel().add(last_edit)
self.view.show(last_edit)
<commit_msg>Return to original if cursor is already at least edit<commit_after>import sublime, sublime_plugin
LAST_EDITS_SETTING = 'last_edits'
class RecordLastEdit(sublime_plugin.EventListener):
def on_modified(self, view):
last_edits = view.settings().get(LAST_EDITS_SETTING, {})
edit_position = view.sel()[0]
last_edits[str(view.id())] = {'a': edit_position.a, 'b': edit_position.b}
view.settings().set(LAST_EDITS_SETTING, last_edits)
class GotoLastEdit(sublime_plugin.TextCommand):
# The position the cursor was at before the command fired. Saved when the
# command is run, so that if the user runs the command again before making
# another edit in the file, the cursor returns to its original position.
original_position = None
def move_cursor_to_region(self, region):
""" Clear the cursor's position and move it to `region`. """
cursor = self.view.sel()
self.original_position = cursor[0]
cursor.clear()
cursor.add(region)
self.view.show(region)
def run(self, edit):
"""
If there was a last edit recorded for the view, store the current
position as self.original_position and move the cursor to the position
of the last edit.
If the cursor is currently at the same position as the last edit, and
there `self.original_position` is available, then return the cursor to
its original position.
"""
last_edits = self.view.settings().get(LAST_EDITS_SETTING, {})
last_edit = last_edits.get(str(self.view.id()), None)
current_position = self.view.sel()[0]
if last_edit is None:
return
last_edit_region = sublime.Region(
long(last_edit['a']), long(last_edit['b']))
if self.original_position is not None \
and current_position == last_edit_region:
self.move_cursor_to_region(self.original_position)
return
self.move_cursor_to_region(last_edit_region)
|
3a80b55c8a46f46ecd5b458d0aee26b34e8ad1f5 | backend/core/app.py | backend/core/app.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'asdf')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'valdez')
User.create('sundar', 'raman')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
| Add another user in fixture code | Add another user in fixture code
| Python | unlicense | azlyth/tokens,azlyth/tokens,azlyth/tokens | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'asdf')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
Add another user in fixture code | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'valdez')
User.create('sundar', 'raman')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'asdf')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
<commit_msg>Add another user in fixture code<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'valdez')
User.create('sundar', 'raman')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'asdf')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
Add another user in fixture code#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'valdez')
User.create('sundar', 'raman')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'asdf')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
<commit_msg>Add another user in fixture code<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api
from .db import configure_database
class Application:
def __init__(self):
self.flask_app = Flask(__name__)
self.api = Api(self.flask_app)
self.import_configuration()
self.configure_database()
self.setup_api()
def import_configuration(self):
self.flask_app.config.from_object('core.settings')
def configure_database(self):
self.db = configure_database(self.flask_app)
def setup_api(self):
from .api import setup_api
setup_api(self)
def bootstrap(self):
from .models import User, Question, Category
if not Question.find_all():
User.create('peter', 'valdez')
User.create('sundar', 'raman')
Question.create(text='Is this what you want?')
Question.create(text='Really?')
Question.create(text='Are you sure?')
Category.create(name='Strength')
Category.create(name='Dexterity')
Category.create(name='Intelligence')
Category.create(name='Luck')
def run(self):
if self.flask_app.config['DEBUG']:
self.bootstrap()
self.flask_app.run(host='0.0.0.0')
|
c892077dd55405d71cbe6d6191c53260b74447d2 | hubblestack/extmods/grains/hostuuid.py | hubblestack/extmods/grains/hostuuid.py | # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(cached_uuid)
return {'host_uuid': generated}
| # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(generated)
return {'host_uuid': generated}
| Write the correct variable to disk | Write the correct variable to disk
Maybe I should get more sleep
| Python | apache-2.0 | basepi/hubble,basepi/hubble | # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(cached_uuid)
return {'host_uuid': generated}
Write the correct variable to disk
Maybe I should get more sleep | # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(generated)
return {'host_uuid': generated}
| <commit_before># -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(cached_uuid)
return {'host_uuid': generated}
<commit_msg>Write the correct variable to disk
Maybe I should get more sleep<commit_after> | # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(generated)
return {'host_uuid': generated}
| # -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(cached_uuid)
return {'host_uuid': generated}
Write the correct variable to disk
Maybe I should get more sleep# -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(generated)
return {'host_uuid': generated}
| <commit_before># -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(cached_uuid)
return {'host_uuid': generated}
<commit_msg>Write the correct variable to disk
Maybe I should get more sleep<commit_after># -*- coding: utf-8 -*-
'''
Generate a unique uuid for this host, storing it on disk so it persists across
restarts
'''
import logging
import os
import uuid
log = logging.getLogger(__name__)
def host_uuid():
'''
Generate a unique uuid for this host, storing it on disk so it persists
across restarts
'''
cached_uuid = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble_cached_uuid')
try:
if os.path.isfile(cached_uuid):
with open(cached_uuid, 'r') as f:
return {'host_uuid': f.read()}
except Exception as exc:
log.exception('Problem retrieving cached host uuid')
generated = uuid.uuid4()
with open(cached_uuid, 'w') as f:
f.write(generated)
return {'host_uuid': generated}
|
ebb0236d7c68883de7a4202df23e74becd943f29 | hooks/pre_gen_project.py | hooks/pre_gen_project.py | project_slug = '{{ cookiecutter.project_slug }}'
print('pre gen')
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
| project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
| Fix a typo in the pre-generation hooks | Fix a typo in the pre-generation hooks
| Python | bsd-3-clause | valerymelou/cookiecutter-django-gulp,valerymelou/cookiecutter-django-gulp,valerymelou/cookiecutter-django-gulp | project_slug = '{{ cookiecutter.project_slug }}'
print('pre gen')
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
Fix a typo in the pre-generation hooks | project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
| <commit_before>project_slug = '{{ cookiecutter.project_slug }}'
print('pre gen')
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
<commit_msg>Fix a typo in the pre-generation hooks<commit_after> | project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
| project_slug = '{{ cookiecutter.project_slug }}'
print('pre gen')
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
Fix a typo in the pre-generation hooksproject_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
| <commit_before>project_slug = '{{ cookiecutter.project_slug }}'
print('pre gen')
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
<commit_msg>Fix a typo in the pre-generation hooks<commit_after>project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifier(), 'Project slug should be valid Python identifier!'
|
b7b38e67ae95a5d5bed68661c2e106149117fa80 | mendel/views.py | mendel/views.py | from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(context=context).count()
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
| from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(user=request.user, context=context).count()
# If there are existing reviews, delete them first
if existing_review_count:
for review in Review.objects.filter(user=request.user, context=context):
review.delete()
# TODO: Don't delete reviews for categories that are both in existing_reviews and in the request's categories
# Create a review for each category in the request
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
| Handle review deletion when updating categories for a context | Handle review deletion when updating categories for a context
| Python | agpl-3.0 | Architizer/mendel,Architizer/mendel,Architizer/mendel,Architizer/mendel | from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(context=context).count()
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
Handle review deletion when updating categories for a context | from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(user=request.user, context=context).count()
# If there are existing reviews, delete them first
if existing_review_count:
for review in Review.objects.filter(user=request.user, context=context):
review.delete()
# TODO: Don't delete reviews for categories that are both in existing_reviews and in the request's categories
# Create a review for each category in the request
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
| <commit_before>from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(context=context).count()
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
<commit_msg>Handle review deletion when updating categories for a context<commit_after> | from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(user=request.user, context=context).count()
# If there are existing reviews, delete them first
if existing_review_count:
for review in Review.objects.filter(user=request.user, context=context):
review.delete()
# TODO: Don't delete reviews for categories that are both in existing_reviews and in the request's categories
# Create a review for each category in the request
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
| from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(context=context).count()
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
Handle review deletion when updating categories for a contextfrom django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(user=request.user, context=context).count()
# If there are existing reviews, delete them first
if existing_review_count:
for review in Review.objects.filter(user=request.user, context=context):
review.delete()
# TODO: Don't delete reviews for categories that are both in existing_reviews and in the request's categories
# Create a review for each category in the request
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
| <commit_before>from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(context=context).count()
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
<commit_msg>Handle review deletion when updating categories for a context<commit_after>from django.conf import settings
from django.shortcuts import render
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from models import Context, Review, Keyword, Category
def index(request):
context = {'DEBUG': settings.DEBUG}
return render(request, 'index.html', context)
class PostContext(APIView):
def post(self, request, id):
context = Context.objects.get(id=id)
counter = 0
keyword_proposed = context.keyword_given
created = False
if request.data.get('keyword_proposed'):
keyword_proposed, created = Keyword.objects.get_or_create(
name=request.data.get('keyword_proposed')['name']
)
existing_review_count = Review.objects.filter(user=request.user, context=context).count()
# If there are existing reviews, delete them first
if existing_review_count:
for review in Review.objects.filter(user=request.user, context=context):
review.delete()
# TODO: Don't delete reviews for categories that are both in existing_reviews and in the request's categories
# Create a review for each category in the request
if not existing_review_count:
for category in request.data.get('categories'):
Review.objects.create(
context=context,
category=Category.objects.get(id=category),
keyword_given=context.keyword_given,
keyword_proposed=keyword_proposed,
user=request.user,
status=Review.PENDING)
counter += 1
return Response({
"keyword_created": created,
"keyword_proposed": keyword_proposed.name,
"reviews_created": counter,
"existing_review_count": existing_review_count
})
|
5748265d5102ee69e928d65ff3d40779af120dac | count-inversions/count_inversions.py | count-inversions/count_inversions.py | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) | Implement merge component of merge_and_count_split | Implement merge component of merge_and_count_split
The merging part of merge_and_count_split was is taken right out
of merge-sort. A test revealed a mistake in merge_and_sort where
0 was returned instead of the array when the array has one
element, so that was fixed.
| Python | mit | timpel/stanford-algs,timpel/stanford-algs | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)Implement merge component of merge_and_count_split
The merging part of merge_and_count_split was is taken right out
of merge-sort. A test revealed a mistake in merge_and_sort where
0 was returned instead of the array when the array has one
element, so that was fixed. | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) | <commit_before>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)<commit_msg>Implement merge component of merge_and_count_split
The merging part of merge_and_count_split was is taken right out
of merge-sort. A test revealed a mistake in merge_and_sort where
0 was returned instead of the array when the array has one
element, so that was fixed.<commit_after> | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)Implement merge component of merge_and_count_split
The merging part of merge_and_count_split was is taken right out
of merge-sort. A test revealed a mistake in merge_and_sort where
0 was returned instead of the array when the array has one
element, so that was fixed.from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) | <commit_before>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)<commit_msg>Implement merge component of merge_and_count_split
The merging part of merge_and_count_split was is taken right out
of merge-sort. A test revealed a mistake in merge_and_sort where
0 was returned instead of the array when the array has one
element, so that was fixed.<commit_after>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len) |
2f26197d10a1c7cfc010074576c7e1a2c2a31e78 | data_structures/bitorrent/torrent.py | data_structures/bitorrent/torrent.py | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| Use __getitem__ to improve readbility | Use __getitem__ to improve readbility
| Python | apache-2.0 | vtemian/university_projects,vtemian/university_projects,vtemian/university_projects | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
Use __getitem__ to improve readbility | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| <commit_before>import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
<commit_msg>Use __getitem__ to improve readbility<commit_after> | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
Use __getitem__ to improve readbilityimport hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| <commit_before>import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
<commit_msg>Use __getitem__ to improve readbility<commit_after>import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
|
9caa0aa6c8fddc8a21997cf4df88d407b1598412 | keras_cv/__init__.py | keras_cv/__init__.py | # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
| # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import models
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
| Add export for models module | Add export for models module
| Python | apache-2.0 | keras-team/keras-cv,keras-team/keras-cv,keras-team/keras-cv | # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
Add export for models module | # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import models
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
| <commit_before># Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
<commit_msg>Add export for models module<commit_after> | # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import models
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
| # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
Add export for models module# Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import models
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
| <commit_before># Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
<commit_msg>Add export for models module<commit_after># Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import models
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
|
4e18d3d3c35e92d6b800f347b65765ca36d6148b | playserver/track.py | playserver/track.py | import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
result = osascript.osascript(fullCommand)
#Return the return code of the command - 0 if succuessful.
return result[0]
| import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
return osascript.osascript(fullCommand)
| Return full osascript output from getCurrentConfig | Return full osascript output from getCurrentConfig
| Python | mit | ollien/playserver,ollien/playserver,ollien/playserver | import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
result = osascript.osascript(fullCommand)
#Return the return code of the command - 0 if succuessful.
return result[0]
Return full osascript output from getCurrentConfig | import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
return osascript.osascript(fullCommand)
| <commit_before>import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
result = osascript.osascript(fullCommand)
#Return the return code of the command - 0 if succuessful.
return result[0]
<commit_msg>Return full osascript output from getCurrentConfig<commit_after> | import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
return osascript.osascript(fullCommand)
| import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
result = osascript.osascript(fullCommand)
#Return the return code of the command - 0 if succuessful.
return result[0]
Return full osascript output from getCurrentConfigimport configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
return osascript.osascript(fullCommand)
| <commit_before>import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
result = osascript.osascript(fullCommand)
#Return the return code of the command - 0 if succuessful.
return result[0]
<commit_msg>Return full osascript output from getCurrentConfig<commit_after>import configmanager
import osascript
APP_CONFIG_PATH = "../applications/"
applicationConfigs = configmanager.ConfigManager(APP_CONFIG_PATH)
#TODO: Make this user choosable
currentApplication = "radiant"
def getCurrentConfig():
return applicationConfigs[currentApplication]
def _executeCommand(command):
config = getCurrentConfig()
fullCommand = "tell application \"{}\" to {}".format(config["name"],
config["commands"][command])
return osascript.osascript(fullCommand)
|
e7a8c76c1f8f07866a4b7ea55870dacb5c76ef90 | face/tests/model_tests.py | face/tests/model_tests.py | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertEqual(self.face.get_absolute_url(),'/categories/faces/s/lokesh/') | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertRegexpMatches(self.face.get_absolute_url(),'/categories/faces/s/lokesh/?') | Fix unit test for seo faces url | Fix unit test for seo faces url
| Python | bsd-3-clause | PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertEqual(self.face.get_absolute_url(),'/categories/faces/s/lokesh/')Fix unit test for seo faces url | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertRegexpMatches(self.face.get_absolute_url(),'/categories/faces/s/lokesh/?') | <commit_before>from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertEqual(self.face.get_absolute_url(),'/categories/faces/s/lokesh/')<commit_msg>Fix unit test for seo faces url<commit_after> | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertRegexpMatches(self.face.get_absolute_url(),'/categories/faces/s/lokesh/?') | from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertEqual(self.face.get_absolute_url(),'/categories/faces/s/lokesh/')Fix unit test for seo faces urlfrom django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertRegexpMatches(self.face.get_absolute_url(),'/categories/faces/s/lokesh/?') | <commit_before>from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertEqual(self.face.get_absolute_url(),'/categories/faces/s/lokesh/')<commit_msg>Fix unit test for seo faces url<commit_after>from django.test import TestCase
from functional_tests.factory import FaceFactory
class Facetest(TestCase):
def setUp(self):
self.face = FaceFactory(title='Lokesh')
def test_title_to_share_returns_meet_Lokesh__farmer_from_sivaganga_tamil_nadu(self):
self.assertEqual(self.face.title_to_share,'Meet Lokesh, farmer from Sivaganga, Tamil Nadu')
def test_featured_image_returnes_the_image(self):
self.assertEqual(self.face.featured_image,self.face.image)
def test_to_str_returns_lokesh_sivaganga(self):
self.assertEqual(str(self.face),'Lokesh Sivaganga')
def test_get_absolute_url_return_path_with_faces_s_face_page(self):
self.assertRegexpMatches(self.face.get_absolute_url(),'/categories/faces/s/lokesh/?') |
8df691acaebffc343dac4535a64f8a809607558a | Demo/sgi/cd/cdaiff.py | Demo/sgi/cd/cdaiff.py | import sys
import readcd
import aiff
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writesampsraw(data)
def main():
if len(sys.argv) > 1:
a = aiff.Aiff().init(sys.argv[1], 'w')
else:
a = aiff.Aiff().init('@', 'w')
a.sampwidth = AL.SAMPLE_16
a.nchannels = AL.STEREO
a.samprate = AL.RATE_44100
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.destroy()
main()
| import sys
import readcd
import aifc
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writeframesraw(data)
def main():
if len(sys.argv) > 1:
a = aifc.open(sys.argv[1], 'w')
else:
a = aifc.open('@', 'w')
a.setsampwidth(AL.SAMPLE_16)
a.setnchannels(AL.STEREO)
a.setframerate(AL.RATE_44100)
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.close()
main()
| Use module aifc instead of module aiff. | Use module aifc instead of module aiff.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | import sys
import readcd
import aiff
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writesampsraw(data)
def main():
if len(sys.argv) > 1:
a = aiff.Aiff().init(sys.argv[1], 'w')
else:
a = aiff.Aiff().init('@', 'w')
a.sampwidth = AL.SAMPLE_16
a.nchannels = AL.STEREO
a.samprate = AL.RATE_44100
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.destroy()
main()
Use module aifc instead of module aiff. | import sys
import readcd
import aifc
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writeframesraw(data)
def main():
if len(sys.argv) > 1:
a = aifc.open(sys.argv[1], 'w')
else:
a = aifc.open('@', 'w')
a.setsampwidth(AL.SAMPLE_16)
a.setnchannels(AL.STEREO)
a.setframerate(AL.RATE_44100)
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.close()
main()
| <commit_before>import sys
import readcd
import aiff
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writesampsraw(data)
def main():
if len(sys.argv) > 1:
a = aiff.Aiff().init(sys.argv[1], 'w')
else:
a = aiff.Aiff().init('@', 'w')
a.sampwidth = AL.SAMPLE_16
a.nchannels = AL.STEREO
a.samprate = AL.RATE_44100
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.destroy()
main()
<commit_msg>Use module aifc instead of module aiff.<commit_after> | import sys
import readcd
import aifc
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writeframesraw(data)
def main():
if len(sys.argv) > 1:
a = aifc.open(sys.argv[1], 'w')
else:
a = aifc.open('@', 'w')
a.setsampwidth(AL.SAMPLE_16)
a.setnchannels(AL.STEREO)
a.setframerate(AL.RATE_44100)
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.close()
main()
| import sys
import readcd
import aiff
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writesampsraw(data)
def main():
if len(sys.argv) > 1:
a = aiff.Aiff().init(sys.argv[1], 'w')
else:
a = aiff.Aiff().init('@', 'w')
a.sampwidth = AL.SAMPLE_16
a.nchannels = AL.STEREO
a.samprate = AL.RATE_44100
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.destroy()
main()
Use module aifc instead of module aiff.import sys
import readcd
import aifc
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writeframesraw(data)
def main():
if len(sys.argv) > 1:
a = aifc.open(sys.argv[1], 'w')
else:
a = aifc.open('@', 'w')
a.setsampwidth(AL.SAMPLE_16)
a.setnchannels(AL.STEREO)
a.setframerate(AL.RATE_44100)
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.close()
main()
| <commit_before>import sys
import readcd
import aiff
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writesampsraw(data)
def main():
if len(sys.argv) > 1:
a = aiff.Aiff().init(sys.argv[1], 'w')
else:
a = aiff.Aiff().init('@', 'w')
a.sampwidth = AL.SAMPLE_16
a.nchannels = AL.STEREO
a.samprate = AL.RATE_44100
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.destroy()
main()
<commit_msg>Use module aifc instead of module aiff.<commit_after>import sys
import readcd
import aifc
import AL
import CD
Error = 'cdaiff.Error'
def writeaudio(a, type, data):
a.writeframesraw(data)
def main():
if len(sys.argv) > 1:
a = aifc.open(sys.argv[1], 'w')
else:
a = aifc.open('@', 'w')
a.setsampwidth(AL.SAMPLE_16)
a.setnchannels(AL.STEREO)
a.setframerate(AL.RATE_44100)
r = readcd.Readcd().init()
for arg in sys.argv[2:]:
x = eval(arg)
try:
if len(x) <> 2:
raise Error, 'bad argument'
r.appendstretch(x[0], x[1])
except TypeError:
r.appendtrack(x)
r.setcallback(CD.AUDIO, writeaudio, a)
r.play()
a.close()
main()
|
b097675e5906f7b0e9c050110fea58e40491814b | music/api.py | music/api.py | from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
| from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
| Allow filtering and ordering on API | Allow filtering and ordering on API
| Python | bsd-3-clause | praekelt/jmbo-music,praekelt/jmbo-music | from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
Allow filtering and ordering on API | from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
| <commit_before>from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
<commit_msg>Allow filtering and ordering on API<commit_after> | from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
| from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
Allow filtering and ordering on APIfrom django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
| <commit_before>from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
<commit_msg>Allow filtering and ordering on API<commit_after>from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
61bfc62937176b580b8b6ae12a90c5b76b00d50d | libcloud/__init__.py | libcloud/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
__version__ = "0.1.1-dev" | Add version string to libcloud | Add version string to libcloud
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@895867 13f79535-47bb-0310-9956-ffa450edef68
| Python | apache-2.0 | Kami/libcloud,jimbobhickville/libcloud,ClusterHQ/libcloud,apache/libcloud,DimensionDataCBUSydney/libcloud,curoverse/libcloud,smaffulli/libcloud,schaubl/libcloud,techhat/libcloud,DimensionDataCBUSydney/libcloud,cryptickp/libcloud,cloudControl/libcloud,supertom/libcloud,ZuluPro/libcloud,Verizon/libcloud,t-tran/libcloud,Scalr/libcloud,lochiiconnectivity/libcloud,iPlantCollaborativeOpenSource/libcloud,MrBasset/libcloud,mtekel/libcloud,wuyuewen/libcloud,NexusIS/libcloud,supertom/libcloud,jerryblakley/libcloud,andrewsomething/libcloud,Cloud-Elasticity-Services/as-libcloud,Itxaka/libcloud,ByteInternet/libcloud,dcorbacho/libcloud,jerryblakley/libcloud,MrBasset/libcloud,aviweit/libcloud,wuyuewen/libcloud,aleGpereira/libcloud,sergiorua/libcloud,Verizon/libcloud,watermelo/libcloud,pquentin/libcloud,Itxaka/libcloud,briancurtin/libcloud,illfelder/libcloud,sahildua2305/libcloud,samuelchong/libcloud,Keisuke69/libcloud,sfriesel/libcloud,Scalr/libcloud,Verizon/libcloud,atsaki/libcloud,samuelchong/libcloud,thesquelched/libcloud,mtekel/libcloud,Jc2k/libcloud,briancurtin/libcloud,munkiat/libcloud,ZuluPro/libcloud,JamesGuthrie/libcloud,wrigri/libcloud,lochiiconnectivity/libcloud,andrewsomething/libcloud,jimbobhickville/libcloud,Kami/libcloud,SecurityCompass/libcloud,wido/libcloud,dcorbacho/libcloud,mgogoulos/libcloud,atsaki/libcloud,aviweit/libcloud,aleGpereira/libcloud,mtekel/libcloud,kater169/libcloud,ninefold/libcloud,apache/libcloud,niteoweb/libcloud,iPlantCollaborativeOpenSource/libcloud,marcinzaremba/libcloud,cryptickp/libcloud,marcinzaremba/libcloud,aviweit/libcloud,andrewsomething/libcloud,Jc2k/libcloud,SecurityCompass/libcloud,dcorbacho/libcloud,techhat/libcloud,carletes/libcloud,mgogoulos/libcloud,carletes/libcloud,MrBasset/libcloud,apache/libcloud,sgammon/libcloud,smaffulli/libcloud,t-tran/libcloud,mbrukman/libcloud,SecurityCompass/libcloud,cloudControl/libcloud,JamesGuthrie/libcloud,JamesGuthrie/libcloud,kater169/libcloud,DimensionDataCBUSydney/libclo
ud,ZuluPro/libcloud,wuyuewen/libcloud,pantheon-systems/libcloud,aleGpereira/libcloud,curoverse/libcloud,StackPointCloud/libcloud,niteoweb/libcloud,niteoweb/libcloud,lochiiconnectivity/libcloud,StackPointCloud/libcloud,t-tran/libcloud,mbrukman/libcloud,jimbobhickville/libcloud,mistio/libcloud,jerryblakley/libcloud,curoverse/libcloud,watermelo/libcloud,NexusIS/libcloud,sergiorua/libcloud,vongazman/libcloud,munkiat/libcloud,Cloud-Elasticity-Services/as-libcloud,marcinzaremba/libcloud,mathspace/libcloud,sahildua2305/libcloud,mathspace/libcloud,erjohnso/libcloud,illfelder/libcloud,Kami/libcloud,watermelo/libcloud,erjohnso/libcloud,cloudControl/libcloud,pquentin/libcloud,samuelchong/libcloud,mistio/libcloud,schaubl/libcloud,techhat/libcloud,mgogoulos/libcloud,thesquelched/libcloud,sfriesel/libcloud,kater169/libcloud,mbrukman/libcloud,sgammon/libcloud,erjohnso/libcloud,pquentin/libcloud,wido/libcloud,supertom/libcloud,briancurtin/libcloud,ByteInternet/libcloud,thesquelched/libcloud,cryptickp/libcloud,ByteInternet/libcloud,ClusterHQ/libcloud,munkiat/libcloud,illfelder/libcloud,mathspace/libcloud,Cloud-Elasticity-Services/as-libcloud,carletes/libcloud,sfriesel/libcloud,iPlantCollaborativeOpenSource/libcloud,Scalr/libcloud,vongazman/libcloud,vongazman/libcloud,NexusIS/libcloud,sahildua2305/libcloud,smaffulli/libcloud,ninefold/libcloud,Keisuke69/libcloud,sergiorua/libcloud,atsaki/libcloud,schaubl/libcloud,pantheon-systems/libcloud,StackPointCloud/libcloud,wrigri/libcloud,mistio/libcloud,wido/libcloud,Itxaka/libcloud,pantheon-systems/libcloud,wrigri/libcloud | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
Add version string to libcloud
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@895867 13f79535-47bb-0310-9956-ffa450edef68 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
__version__ = "0.1.1-dev" | <commit_before># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
<commit_msg>Add version string to libcloud
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@895867 13f79535-47bb-0310-9956-ffa450edef68<commit_after> | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
__version__ = "0.1.1-dev" | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
Add version string to libcloud
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@895867 13f79535-47bb-0310-9956-ffa450edef68# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
__version__ = "0.1.1-dev" | <commit_before># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
<commit_msg>Add version string to libcloud
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@895867 13f79535-47bb-0310-9956-ffa450edef68<commit_after># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
"""
__version__ = "0.1.1-dev" |
372ff487c068da2b31cd25e550e8dcd7bd12d17d | openprocurement/tender/esco/adapters.py | openprocurement/tender/esco/adapters.py | # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
| # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
# Param to set awarding criteria field
awarding_criteria_key = 'amountPerfomance'
| Add awarding criteria field to configurator | Add awarding criteria field to configurator
| Python | apache-2.0 | openprocurement/openprocurement.tender.esco | # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
Add awarding criteria field to configurator | # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
# Param to set awarding criteria field
awarding_criteria_key = 'amountPerfomance'
| <commit_before># -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
<commit_msg>Add awarding criteria field to configurator<commit_after> | # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
# Param to set awarding criteria field
awarding_criteria_key = 'amountPerfomance'
| # -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
Add awarding criteria field to configurator# -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
# Param to set awarding criteria field
awarding_criteria_key = 'amountPerfomance'
| <commit_before># -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
<commit_msg>Add awarding criteria field to configurator<commit_after># -*- coding: utf-8 -*-
from openprocurement.tender.openeu.adapters import TenderAboveThresholdEUConfigurator
from openprocurement.tender.esco.models import Tender
class TenderESCOConfigurator(TenderAboveThresholdEUConfigurator):
""" ESCO Tender configuration adapter """
name = "esco Tender configurator"
model = Tender
# Param to configure award criteria - awards are generated from higher to lower by value.amount
reverse_awarding_criteria = True
# Param to set awarding criteria field
awarding_criteria_key = 'amountPerfomance'
|
b94e4971e3bcde718c40dc963a995e8afa5cf99f | examples/rate_limiting_test.py | examples/rate_limiting_test.py | from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(4) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(10):
self.print_item(item)
| from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(1,11):
self.print_item(item)
| Make the rate-limiting test more fancy | Make the rate-limiting test more fancy
| Python | mit | mdmintz/SeleniumBase,mdmintz/seleniumspot,possoumous/Watchers,ktp420/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,possoumous/Watchers,possoumous/Watchers,ktp420/SeleniumBase,mdmintz/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,possoumous/Watchers,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase | from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(4) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(10):
self.print_item(item)
Make the rate-limiting test more fancy | from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(1,11):
self.print_item(item)
| <commit_before>from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(4) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(10):
self.print_item(item)
<commit_msg>Make the rate-limiting test more fancy<commit_after> | from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(1,11):
self.print_item(item)
| from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(4) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(10):
self.print_item(item)
Make the rate-limiting test more fancyfrom seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(1,11):
self.print_item(item)
| <commit_before>from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(4) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(10):
self.print_item(item)
<commit_msg>Make the rate-limiting test more fancy<commit_after>from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print item
def test_rate_limited_printing(self):
print "\nRunning rate-limited print test:"
for item in xrange(1,11):
self.print_item(item)
|
50f577e63fe58531447dc0bc2eed80859d3aa1ad | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | Create script to save documentation to a file | 4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after> | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) |
6d25dcdb5eaca6d0d0404b4104017a18076174f8 | mass/utils.py | mass/utils.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1):
"""Submit mass job to SWF with specific priority.
"""
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
| Raise UnsupportedScheduler if specific scheduler for submit is not supported (Now just swf is supported). | Raise UnsupportedScheduler if specific scheduler for submit is not
supported (Now just swf is supported).
| Python | apache-2.0 | KKBOX/mass,badboy99tw/mass,KKBOX/mass,badboy99tw/mass,badboy99tw/mass,KKBOX/mass | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1):
"""Submit mass job to SWF with specific priority.
"""
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
Raise UnsupportedScheduler if specific scheduler for submit is not
supported (Now just swf is supported). | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1):
"""Submit mass job to SWF with specific priority.
"""
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
<commit_msg>Raise UnsupportedScheduler if specific scheduler for submit is not
supported (Now just swf is supported).<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1):
"""Submit mass job to SWF with specific priority.
"""
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
Raise UnsupportedScheduler if specific scheduler for submit is not
supported (Now just swf is supported).#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1):
"""Submit mass job to SWF with specific priority.
"""
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
<commit_msg>Raise UnsupportedScheduler if specific scheduler for submit is not
supported (Now just swf is supported).<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
from mass.scheduler.swf import config
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
import boto3
client = boto3.client('swf', region_name=config.REGION)
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
|
adb265a57baed6a94f83ba13f88342313ad78566 | tests/adapter.py | tests/adapter.py | """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry):
"""Mock store_entry"""
pass
def store_response(self, response):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
| """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry: dict):
"""Mock store_entry"""
pass
def store_response(self, response: dict):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id: str):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
| Add type annotations to MockStorageAdapter methods | Add type annotations to MockStorageAdapter methods
| Python | mit | tjmcginnis/tmj | """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry):
"""Mock store_entry"""
pass
def store_response(self, response):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
Add type annotations to MockStorageAdapter methods | """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry: dict):
"""Mock store_entry"""
pass
def store_response(self, response: dict):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id: str):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
| <commit_before>"""adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry):
"""Mock store_entry"""
pass
def store_response(self, response):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
<commit_msg>Add type annotations to MockStorageAdapter methods<commit_after> | """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry: dict):
"""Mock store_entry"""
pass
def store_response(self, response: dict):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id: str):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
| """adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry):
"""Mock store_entry"""
pass
def store_response(self, response):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
Add type annotations to MockStorageAdapter methods"""adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry: dict):
"""Mock store_entry"""
pass
def store_response(self, response: dict):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id: str):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
| <commit_before>"""adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry):
"""Mock store_entry"""
pass
def store_response(self, response):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
<commit_msg>Add type annotations to MockStorageAdapter methods<commit_after>"""adapter
Mock storage adapter class for unit tests
"""
class MockStorageAdapter:
"""Mock storage adapter class.
Will be patched for testing purposes
"""
def store_entry(self, entry: dict):
"""Mock store_entry"""
pass
def store_response(self, response: dict):
"""Mock store_response"""
pass
def get_all_entries(self):
"""Mock get_all_entries"""
pass
def get_entry_responses(self, entry_id: str):
"""Mock get_entry_responses"""
pass
def get_last_entry(self):
"""Mock get_last_entry"""
pass
def get_prompts(self):
"""Mock get_prompts"""
pass
|
915001e0e46fef2cf1e6b1c78614d0254b9db21e | backoff/_wait_gen.py | backoff/_wait_gen.py | # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
| # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
| Fix spelling of exponential in pydoc | Fix spelling of exponential in pydoc | Python | mit | litl/backoff | # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
Fix spelling of exponential in pydoc | # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
| <commit_before># coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
<commit_msg>Fix spelling of exponential in pydoc<commit_after> | # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
| # coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
Fix spelling of exponential in pydoc# coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
| <commit_before># coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
<commit_msg>Fix spelling of exponential in pydoc<commit_after># coding:utf-8
import itertools
def expo(base=2, factor=1, max_value=None):
"""Generator for exponential decay.
Args:
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
n = 0
while True:
a = factor * base ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
def fibo(max_value=None):
"""Generator for fibonaccial decay.
Args:
max_value: The maximum value to yield. Once the value in the
true fibonacci sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = 1
b = 1
while True:
if max_value is None or a < max_value:
yield a
a, b = b, a + b
else:
yield max_value
def constant(interval=1):
"""Generator for constant intervals.
Args:
interval: A constant value to yield or an iterable of such values.
"""
try:
itr = iter(interval)
except TypeError:
itr = itertools.repeat(interval)
for val in itr:
yield val
|
32126085f361489bb5c9c18972479b0c313c7d10 | bash_runner/tasks.py | bash_runner/tasks.py | """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
| """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
| Send the output to a tmp file | Send the output to a tmp file
| Python | apache-2.0 | rantav/cosmo-plugin-bash-runner | """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
Send the output to a tmp file | """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
| <commit_before>"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
<commit_msg>Send the output to a tmp file<commit_after> | """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
| """
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
Send the output to a tmp file"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
| <commit_before>"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
<commit_msg>Send the output to a tmp file<commit_after>"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
7c460a174b1c461c76d08161698ead77e1236f15 | redshirt/analyze.py | redshirt/analyze.py | import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
| import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_li
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_li(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
| Use Li method for thresholding instead of Otsu | Use Li method for thresholding instead of Otsu
| Python | mit | jni/python-redshirt | import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
Use Li method for thresholding instead of Otsu | import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_li
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_li(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
| <commit_before>import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
<commit_msg>Use Li method for thresholding instead of Otsu<commit_after> | import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_li
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_li(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
| import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
Use Li method for thresholding instead of Otsuimport numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_li
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_li(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
| <commit_before>import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
<commit_msg>Use Li method for thresholding instead of Otsu<commit_after>import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_li
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_li(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
return regions == (np.argmax(region_sizes[1:]) + 1)
def extract_trace(image, axis=-1):
"""Get a mean intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0) / np.sum(roi)
return trace, roi
|
637651e572d9bcd4049ad5351f7fde1869c6823a | onadata/libs/authentication.py | onadata/libs/authentication.py | from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
| from django.conf import settings
from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if settings.TESTING_MODE is False and \
user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
| Allow HTTP auth during tests | Allow HTTP auth during tests
| Python | bsd-2-clause | kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat | from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
Allow HTTP auth during tests | from django.conf import settings
from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if settings.TESTING_MODE is False and \
user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
| <commit_before>from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
<commit_msg>Allow HTTP auth during tests<commit_after> | from django.conf import settings
from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if settings.TESTING_MODE is False and \
user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
| from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
Allow HTTP auth during testsfrom django.conf import settings
from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if settings.TESTING_MODE is False and \
user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
| <commit_before>from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
<commit_msg>Allow HTTP auth during tests<commit_after>from django.conf import settings
from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header,
BasicAuthentication)
from rest_framework.exceptions import AuthenticationFailed
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class HttpsOnlyBasicAuthentication(BasicAuthentication):
def authenticate(self, request):
# The parent class can discern whether basic authentication is even
# being attempted; if it isn't, we need to gracefully defer to other
# authenticators
user_auth = super(HttpsOnlyBasicAuthentication, self).authenticate(
request)
if settings.TESTING_MODE is False and \
user_auth is not None and not request.is_secure():
# Scold the user if they provided correct credentials for basic
# auth but didn't use HTTPS
raise AuthenticationFailed(_(
u'Using basic authentication without HTTPS transmits '
u'credentials in clear text! You MUST connect via HTTPS '
u'to use basic authentication.'
))
return user_auth
|
c5096a3370ad9b4fff428580e1b3c0c7de1399ce | scripts/set_ports.py | scripts/set_ports.py | #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
def get_port_mappings(container):
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
set_tag(container, "ports", json.dumps(get_port_mappings(container)))
| #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
container_id=None
def get_port_mappings(container):
global container_id
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
for k, v in get_port_mappings(container).items():
set_tag(container, 'port:%s' % k, v)
set_tag(container, 'container_id', container_id)
host_member = os.popen('serf members --detailed --format json --name `hostname`')
addr = json.loads(host_member.read().strip())['members'][0]['addr'].split(':')[0]
set_tag(container, 'ext_ip', addr)
| Set container_id and ext_ip container serf tags. | Set container_id and ext_ip container serf tags.
| Python | apache-2.0 | johanatan/datt-metadatt,johanatan/datt-metadatt,dattlabs/datt-metadatt,johanatan/datt-metadatt,johanatan/datt-metadatt,dattlabs/datt-metadatt,dattlabs/datt-metadatt,dattlabs/datt-metadatt | #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
def get_port_mappings(container):
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
set_tag(container, "ports", json.dumps(get_port_mappings(container)))
Set container_id and ext_ip container serf tags. | #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
container_id=None
def get_port_mappings(container):
global container_id
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
for k, v in get_port_mappings(container).items():
set_tag(container, 'port:%s' % k, v)
set_tag(container, 'container_id', container_id)
host_member = os.popen('serf members --detailed --format json --name `hostname`')
addr = json.loads(host_member.read().strip())['members'][0]['addr'].split(':')[0]
set_tag(container, 'ext_ip', addr)
| <commit_before>#!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
def get_port_mappings(container):
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
set_tag(container, "ports", json.dumps(get_port_mappings(container)))
<commit_msg>Set container_id and ext_ip container serf tags.<commit_after> | #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
container_id=None
def get_port_mappings(container):
global container_id
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
for k, v in get_port_mappings(container).items():
set_tag(container, 'port:%s' % k, v)
set_tag(container, 'container_id', container_id)
host_member = os.popen('serf members --detailed --format json --name `hostname`')
addr = json.loads(host_member.read().strip())['members'][0]['addr'].split(':')[0]
set_tag(container, 'ext_ip', addr)
| #!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
def get_port_mappings(container):
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
set_tag(container, "ports", json.dumps(get_port_mappings(container)))
Set container_id and ext_ip container serf tags.#!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
container_id=None
def get_port_mappings(container):
global container_id
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
for k, v in get_port_mappings(container).items():
set_tag(container, 'port:%s' % k, v)
set_tag(container, 'container_id', container_id)
host_member = os.popen('serf members --detailed --format json --name `hostname`')
addr = json.loads(host_member.read().strip())['members'][0]['addr'].split(':')[0]
set_tag(container, 'ext_ip', addr)
| <commit_before>#!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
def get_port_mappings(container):
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
from set_tag import set_tag, check_container_count
parser = OptionParser(usage="usage: %prog container")
(options, args) = parser.parse_args()
if len(args) != 1: parser.print_help(); exit(1)
container = args[0]
check_container_count(container)
set_tag(container, "ports", json.dumps(get_port_mappings(container)))
<commit_msg>Set container_id and ext_ip container serf tags.<commit_after>#!/usr/bin/env python -B
from optparse import OptionParser
import inspect
import json
import os
container_id=None
def get_port_mappings(container):
global container_id
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
container_id = os.popen('cat %s/../containers/%s/host.id' % (script_dir, container)).read().strip()
info = os.popen('docker inspect %s' % container_id).read().strip()
parsed = json.loads(info)[0]
return {k.split('/')[0]:v[0]['HostPort'] for k,v in parsed['NetworkSettings']['Ports'].items()}
def rpc_port(container):
return get_port_mappings(container)['7373']
if __name__ == '__main__':
    from set_tag import set_tag, check_container_count
    parser = OptionParser(usage="usage: %prog container")
    (options, args) = parser.parse_args()
    if len(args) != 1: parser.print_help(); exit(1)
    container = args[0]
    check_container_count(container)
    # Publish one serf tag per exposed container port, e.g. "port:7373" -> host port.
    for k, v in get_port_mappings(container).items():
        set_tag(container, 'port:%s' % k, v)
    # get_port_mappings() stored the docker id in the module-global as a side effect.
    set_tag(container, 'container_id', container_id)
    # Look up this host's external IP from its own serf membership entry
    # ("addr" is "ip:port"; keep the ip half).
    host_member = os.popen('serf members --detailed --format json --name `hostname`')
    addr = json.loads(host_member.read().strip())['members'][0]['addr'].split(':')[0]
    set_tag(container, 'ext_ip', addr)
|
aa11d654d59dc604fa71b2e73a56338aff7a23e7 | pymake/setup.py | pymake/setup.py | """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools

# Short summary, reused as the distribution's `description` below.
description = "Python Super CMake"

setuptools.setup(
    name='generate_cmake',
    version='1.8.0',
    license='MIT',
    # The module docstring doubles as the long description.
    long_description=__doc__,
    url='https://github.com/jpanikulam/experiments/tree/master/pymake',
    author='Jacob Panikulam',
    author_email='jpanikul@gmail.com',
    packages=setuptools.find_packages(),
    description=description,
    keywords="cmake make stupid",
    platforms='any',
    zip_safe=True,
    # Installs the `pymake` command-line script.
    scripts=['scripts/pymake'],
    install_requires=[
        'colorama==0.3.9',
    ],
)
| """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.1',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
| Update pymake version and post twine | Update pymake version and post twine
| Python | mit | jpanikulam/experiments,jpanikulam/experiments,jpanikulam/experiments,jpanikulam/experiments | """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.0',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
Update pymake version and post twine | """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.1',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
| <commit_before>"""cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.0',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
<commit_msg>Update pymake version and post twine<commit_after> | """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.1',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
| """cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.0',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
Update pymake version and post twine"""cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.1',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
| <commit_before>"""cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.0',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
<commit_msg>Update pymake version and post twine<commit_after>"""cppmake auto-generates a big CMake file from C++ headers
"""
import setuptools
description = "Python Super CMake"
setuptools.setup(
name='generate_cmake',
version='1.8.1',
license='MIT',
long_description=__doc__,
url='https://github.com/jpanikulam/experiments/tree/master/pymake',
author='Jacob Panikulam',
author_email='jpanikul@gmail.com',
packages=setuptools.find_packages(),
description=description,
keywords="cmake make stupid",
platforms='any',
zip_safe=True,
scripts=['scripts/pymake'],
install_requires=[
'colorama==0.3.9',
],
)
|
2ab41930077dec805388bef4c5c8f67912240709 | templation/models.py | templation/models.py | # -*- coding: utf-8 -*-
import errno
import os.path

from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _

from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
    """Abstract link table granting a user access to a resource.

    Enforces one access row per (user, resource) pair and exposes the
    WebDav path for the resource.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    resource = models.ForeignKey(RESOURCE_MODEL)

    class Meta:
        abstract = True
        verbose_name = _('ResourceAccess')
        verbose_name_plural = _('ResourceAccesses')
        # A user may be granted access to a given resource only once.
        unique_together = ('user', 'resource')

    def get_absolute_url(self):
        """
        Returns the WebDav path for this resource
        """
        return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
    """
    Resource Access Model
    """
    # Concrete model: inherits all fields and constraints unchanged from
    # the abstract base.
def create_resource_access(sender, instance, created, **kwargs):
    """post_save hook: provision the DAV folder for a new ResourceAccess.

    Runs only when the row is first created (not on updates).  Creates
    DAV_ROOT/<resource.id>/ — tolerating an already-existing directory —
    then hands the instance to RESOURCE_ACCESS_MODEL_INITIALIZER for
    project-specific setup.
    """
    if created:
        # Initialize folders (TODO: copy template)
        try:
            os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
        except OSError as e:
            # Named constant instead of the magic number 17: only an
            # already-existing directory is tolerated; any other OS failure
            # must propagate.
            if e.errno != errno.EEXIST:
                raise
        RESOURCE_ACCESS_MODEL_INITIALIZER(instance)

post_save.connect(create_resource_access, sender=ResourceAccess)
| # -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
except OSError as e:
if e.errno != 17:
raise
post_save.connect(create_resource_access, sender=ResourceAccess)
| Initialize folder only at creation | Initialize folder only at creation
| Python | bsd-3-clause | qdqmedia/django-templation,qdqmedia/django-templation,qdqmedia/django-templation | # -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
except OSError as e:
if e.errno != 17:
raise
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
post_save.connect(create_resource_access, sender=ResourceAccess)
Initialize folder only at creation | # -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
except OSError as e:
if e.errno != 17:
raise
post_save.connect(create_resource_access, sender=ResourceAccess)
| <commit_before># -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
except OSError as e:
if e.errno != 17:
raise
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
post_save.connect(create_resource_access, sender=ResourceAccess)
<commit_msg>Initialize folder only at creation<commit_after> | # -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
except OSError as e:
if e.errno != 17:
raise
post_save.connect(create_resource_access, sender=ResourceAccess)
| # -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
except OSError as e:
if e.errno != 17:
raise
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
post_save.connect(create_resource_access, sender=ResourceAccess)
Initialize folder only at creation# -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
except OSError as e:
if e.errno != 17:
raise
post_save.connect(create_resource_access, sender=ResourceAccess)
| <commit_before># -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
except OSError as e:
if e.errno != 17:
raise
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
post_save.connect(create_resource_access, sender=ResourceAccess)
<commit_msg>Initialize folder only at creation<commit_after># -*- coding: utf-8 -*-
import os.path
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from .settings import DAV_ROOT, PROVIDER_NAME, RESOURCE_MODEL, RESOURCE_ACCESS_MODEL_INITIALIZER
class AbstractResourceAccess(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resource = models.ForeignKey(RESOURCE_MODEL)
class Meta:
abstract = True
verbose_name = _('ResourceAccess')
verbose_name_plural = _('ResourceAccesses')
unique_together = ('user', 'resource')
def get_absolute_url(self):
"""
Returns the WebDav path for this resource
"""
return os.path.join('/' + PROVIDER_NAME, str(self.resource.id)) + '/'
class ResourceAccess(AbstractResourceAccess):
"""
Resource Access Model
"""
def create_resource_access(sender, instance, created, **kwargs):
if created:
# Initialize folders (TODO: copy template)
try:
os.makedirs(os.path.join(DAV_ROOT, str(instance.resource.id)))
RESOURCE_ACCESS_MODEL_INITIALIZER(instance)
except OSError as e:
if e.errno != 17:
raise
post_save.connect(create_resource_access, sender=ResourceAccess)
|
aedabe987e6ce93d61ed7707f0ebdc874b60fa1b | libtmux/__about__.py | libtmux/__about__.py | __title__ = 'libtmux'
# Package metadata constants for libtmux (version, author, project URLs).
# Plain assignments only, so the module is importable with no dependencies.
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
| __title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__docs__ = 'https://libtmux.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/libtmux/issues'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
| Add docs and issue tracker to metadata | Add docs and issue tracker to metadata
| Python | bsd-3-clause | tony/libtmux | __title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
Add docs and issue tracker to metadata | __title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__docs__ = 'https://libtmux.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/libtmux/issues'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
| <commit_before>__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
<commit_msg>Add docs and issue tracker to metadata<commit_after> | __title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__docs__ = 'https://libtmux.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/libtmux/issues'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
| __title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
Add docs and issue tracker to metadata__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__docs__ = 'https://libtmux.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/libtmux/issues'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
| <commit_before>__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
<commit_msg>Add docs and issue tracker to metadata<commit_after>__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.1'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__docs__ = 'https://libtmux.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/libtmux/issues'
__pypi__ = 'https://pypi.org/project/libtmux/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
|
2de494810b73dd69c6b4bb87e87007291309d573 | lightstep/util.py | lightstep/util.py | """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
    """Current wall-clock time, in microseconds since the epoch."""
    now_seconds = time.time()
    return _time_to_micros(now_seconds)
def _time_to_micros(t):
    """Convert a time.time()-style timestamp `t` (seconds since the epoch,
    float) to integer microseconds."""
    # Convert the argument, not a fresh time.time() reading: callers pass
    # arbitrary (e.g. historical) timestamps, and ignoring `t` made the
    # result non-deterministic and wrong.
    return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
| """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
| Fix _time_to_micros bug. It was calling time.time() when it should use its own argument. | Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.
| Python | mit | lightstephq/lightstep-tracer-python | """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument. | """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
| <commit_before>""" Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
<commit_msg>Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.<commit_after> | """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
| """ Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.""" Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
| <commit_before>""" Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
<commit_msg>Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.<commit_after>""" Utility functions
"""
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
|
7278f68b18f8cee3f9a78e1265df0994a23254bc | mamba/__init__.py | mamba/__init__.py | from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = [describe, context, before, after, skip]
| from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = ['describe', 'context', 'before', 'after', 'skip']
| Fix the import all mamba error | Fix the import all mamba error
| Python | mit | nestorsalceda/mamba,markng/mamba,alejandrodob/mamba,dex4er/mamba,angelsanz/mamba,jaimegildesagredo/mamba,eferro/mamba | from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = [describe, context, before, after, skip]
Fix the import all mamba error | from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = ['describe', 'context', 'before', 'after', 'skip']
| <commit_before>from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = [describe, context, before, after, skip]
<commit_msg>Fix the import all mamba error<commit_after> | from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = ['describe', 'context', 'before', 'after', 'skip']
| from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = [describe, context, before, after, skip]
Fix the import all mamba errorfrom mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = ['describe', 'context', 'before', 'after', 'skip']
| <commit_before>from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = [describe, context, before, after, skip]
<commit_msg>Fix the import all mamba error<commit_after>from mamba.loader import describe, context
from mamba.hooks import before, after
from mamba.decorators import skip
__all__ = ['describe', 'context', 'before', 'after', 'skip']
|
ec8d7181be646498717b8efa97dd6770d61f067a | test/viz/test_pca.py | test/viz/test_pca.py |
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
|
import pytest
@pytest.mark.timeout(10)
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
| Set timeout on pca test | Set timeout on pca test
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana |
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
Set timeout on pca test |
import pytest
@pytest.mark.timeout(10)
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
| <commit_before>
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
<commit_msg>Set timeout on pca test<commit_after> |
import pytest
@pytest.mark.timeout(10)
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
|
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
Set timeout on pca test
import pytest
@pytest.mark.timeout(10)
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
| <commit_before>
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
<commit_msg>Set timeout on pca test<commit_after>
import pytest
@pytest.mark.timeout(10)
def test_pca():
from sequana.viz.pca import PCA
from sequana import sequana_data
import pandas as pd
data = sequana_data("test_pca.csv")
df = pd.read_csv(data)
df = df.set_index("Id")
p = PCA(df, colors={
"A1": 'r', "A2": 'r', 'A3': 'r',
"B1": 'b', "B2": 'b', 'B3': 'b'})
p.plot(n_components=2, switch_y=True)
p.plot(n_components=2, switch_x=True)
p.plot(n_components=3, switch_z=True)
p.plot_pca_vs_max_features(n_components=4)
p.plot_pca_vs_max_features(step=50000)
|
a7ece0bd59b63455d26efbc927df7dc5607ce55b | tests/test_player.py | tests/test_player.py | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | Add unit tests for functions call and check and fold for module player | Add unit tests for functions call and check and fold for module player
| Python | mit | ueg1990/pypoker | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()Add unit tests for functions call and check and fold for module player | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | <commit_before>import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()<commit_msg>Add unit tests for functions call and check and fold for module player<commit_after> | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()Add unit tests for functions call and check and fold for module playerimport unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | <commit_before>import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()<commit_msg>Add unit tests for functions call and check and fold for module player<commit_after>import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() |
7bed531fcbc63de25572a6b02fb8b19bd066fa50 | test_pearhash.py | test_pearhash.py | import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
| import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
def test_two_bytes(self):
hasher = PearsonHasher(2)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297')
def test_two_bytes_custom_seed(self):
hasher = PearsonHasher(2, seed = 'whatevs')
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), 'd710')
def test_four_bytes(self):
hasher = PearsonHasher(4)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297b8d9')
| Add a few trivial tests | Add a few trivial tests
| Python | mit | ze-phyr-us/pearhash | import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
Add a few trivial tests | import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
def test_two_bytes(self):
hasher = PearsonHasher(2)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297')
def test_two_bytes_custom_seed(self):
hasher = PearsonHasher(2, seed = 'whatevs')
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), 'd710')
def test_four_bytes(self):
hasher = PearsonHasher(4)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297b8d9')
| <commit_before>import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
<commit_msg>Add a few trivial tests<commit_after> | import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
def test_two_bytes(self):
hasher = PearsonHasher(2)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297')
def test_two_bytes_custom_seed(self):
hasher = PearsonHasher(2, seed = 'whatevs')
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), 'd710')
def test_four_bytes(self):
hasher = PearsonHasher(4)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297b8d9')
| import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
Add a few trivial testsimport unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
def test_two_bytes(self):
hasher = PearsonHasher(2)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297')
def test_two_bytes_custom_seed(self):
hasher = PearsonHasher(2, seed = 'whatevs')
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), 'd710')
def test_four_bytes(self):
hasher = PearsonHasher(4)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297b8d9')
| <commit_before>import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
<commit_msg>Add a few trivial tests<commit_after>import unittest
from pearhash import PearsonHasher
class TestPearsonHasher(unittest.TestCase):
def test_table_is_a_permutation_of_range_256(self):
hasher = PearsonHasher(2)
self.assertEqual(set(hasher.table), set(range(256)))
def test_two_bytes(self):
hasher = PearsonHasher(2)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297')
def test_two_bytes_custom_seed(self):
hasher = PearsonHasher(2, seed = 'whatevs')
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), 'd710')
def test_four_bytes(self):
hasher = PearsonHasher(4)
self.assertEqual(hasher.hash(b'ni hao').hexdigest(), '1297b8d9')
|
44de3c76421a2ed4917ac7f2c6798dec631650a8 | spacy/tests/regression/test_issue834.py | spacy/tests/regression/test_issue834.py | # coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
| # coding: utf-8
from __future__ import unicode_literals
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
\u00A0 -1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab, text_file):
"""Test that no-break space (U+00A0) is detected as space by the load_vectors function."""
text_file.write(word2vec_str)
text_file.seek(0)
vector_length = en_vocab.load_vectors(text_file)
assert vector_length == 3
| Reformat test and use text_file fixture | Reformat test and use text_file fixture
| Python | mit | spacy-io/spaCy,raphael0202/spaCy,explosion/spaCy,explosion/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,banglakit/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,spacy-io/spaCy,raphael0202/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,raphael0202/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,raphael0202/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,explosion/spaCy,spacy-io/spaCy | # coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Reformat test and use text_file fixture | # coding: utf-8
from __future__ import unicode_literals
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
\u00A0 -1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab, text_file):
"""Test that no-break space (U+00A0) is detected as space by the load_vectors function."""
text_file.write(word2vec_str)
text_file.seek(0)
vector_length = en_vocab.load_vectors(text_file)
assert vector_length == 3
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Reformat test and use text_file fixture<commit_after> | # coding: utf-8
from __future__ import unicode_literals
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
\u00A0 -1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab, text_file):
"""Test that no-break space (U+00A0) is detected as space by the load_vectors function."""
text_file.write(word2vec_str)
text_file.seek(0)
vector_length = en_vocab.load_vectors(text_file)
assert vector_length == 3
| # coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Reformat test and use text_file fixture# coding: utf-8
from __future__ import unicode_literals
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
\u00A0 -1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab, text_file):
"""Test that no-break space (U+00A0) is detected as space by the load_vectors function."""
text_file.write(word2vec_str)
text_file.seek(0)
vector_length = en_vocab.load_vectors(text_file)
assert vector_length == 3
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Reformat test and use text_file fixture<commit_after># coding: utf-8
from __future__ import unicode_literals
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
\u00A0 -1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab, text_file):
"""Test that no-break space (U+00A0) is detected as space by the load_vectors function."""
text_file.write(word2vec_str)
text_file.seek(0)
vector_length = en_vocab.load_vectors(text_file)
assert vector_length == 3
|
042791f62619a4a8694df91dc8d03f0ba4460bc2 | mineserver.py | mineserver.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core.py.
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core/server.py
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) | Fix a misleading and confusing outdated typo in base layer. | Fix a misleading and confusing outdated typo in base layer.
| Python | mit | TiberiumPY/puremine,Armored-Dragon/pymineserver | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core.py.
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:])Fix a misleading and confusing outdated typo in base layer. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core/server.py
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) | <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core.py.
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:])<commit_msg>Fix a misleading and confusing outdated typo in base layer.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core/server.py
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core.py.
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:])Fix a misleading and confusing outdated typo in base layer.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core/server.py
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) | <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core.py.
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:])<commit_msg>Fix a misleading and confusing outdated typo in base layer.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the new base initialization layer for Mineserver.
# Always call this instead of server_core/server.py
from __future__ import print_function
from server_core.server import *
import sys
main(sys.argv[1:]) |
cdbb9e8bcffc4c5ba47791b81782df4a07273b6b | Lib/test/test_file.py | Lib/test/test_file.py | from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
| import os
from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
os.unlink(TESTFN)
| Clean up the temporary file when done with it. | Clean up the temporary file when done with it.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
Clean up the temporary file when done with it. | import os
from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
os.unlink(TESTFN)
| <commit_before>from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
<commit_msg>Clean up the temporary file when done with it.<commit_after> | import os
from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
os.unlink(TESTFN)
| from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
Clean up the temporary file when done with it.import os
from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
os.unlink(TESTFN)
| <commit_before>from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
<commit_msg>Clean up the temporary file when done with it.<commit_after>import os
from test_support import TESTFN
from UserList import UserList
# verify writelines with instance sequence
l = UserList(['1', '2'])
f = open(TESTFN, 'wb')
f.writelines(l)
f.close()
f = open(TESTFN, 'rb')
buf = f.read()
f.close()
assert buf == '12'
# verify writelines with integers
f = open(TESTFN, 'wb')
try:
f.writelines([1, 2, 3])
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with integers in UserList
f = open(TESTFN, 'wb')
l = UserList([1,2,3])
try:
f.writelines(l)
except TypeError:
pass
else:
print "writelines accepted sequence of integers"
f.close()
# verify writelines with non-string object
class NonString: pass
f = open(TESTFN, 'wb')
try:
f.writelines([NonString(), NonString()])
except TypeError:
pass
else:
print "writelines accepted sequence of non-string objects"
f.close()
os.unlink(TESTFN)
|
be1904cdbea85fa53b1a324449bd42aab320e185 | config/interface.py | config/interface.py | import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasoruce_name = ".datasource_%s" % name
return importlib.import_module(datasoruce_name, "config") | import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasource_name = ".datasource_%s" % name
return importlib.import_module(datasource_name, "config")
| Correct a typo in variable name | Correct a typo in variable name
| Python | bsd-3-clause | pySTEPS/pysteps | import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasoruce_name = ".datasource_%s" % name
return importlib.import_module(datasoruce_name, "config")Correct a typo in variable name | import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasource_name = ".datasource_%s" % name
return importlib.import_module(datasource_name, "config")
| <commit_before>import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasoruce_name = ".datasource_%s" % name
return importlib.import_module(datasoruce_name, "config")<commit_msg>Correct a typo in variable name<commit_after> | import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasource_name = ".datasource_%s" % name
return importlib.import_module(datasource_name, "config")
| import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasoruce_name = ".datasource_%s" % name
return importlib.import_module(datasoruce_name, "config")Correct a typo in variable nameimport importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasource_name = ".datasource_%s" % name
return importlib.import_module(datasource_name, "config")
| <commit_before>import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasoruce_name = ".datasource_%s" % name
return importlib.import_module(datasoruce_name, "config")<commit_msg>Correct a typo in variable name<commit_after>import importlib
def get_specifications(name):
"""Return a datasource specification file according to the following pattern:\n\
datasource_<name>.py
where <name> is the datasource identifier (e.g. "fmi", "mch", "bom", etc.)
"""
datasource_name = ".datasource_%s" % name
return importlib.import_module(datasource_name, "config")
|
f89f24c22d2dd3c1bab59083397501238e2d0ba9 | sputnik/core.py | sputnik/core.py | import requests
import simplejson as json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
| import requests
try:
import simplejson as json
except ImportError:
import json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
| Add json module as fallback for simplejson | Add json module as fallback for simplejson
| Python | mit | iconpin/sputnik-python | import requests
import simplejson as json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
Add json module as fallback for simplejson | import requests
try:
import simplejson as json
except ImportError:
import json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
| <commit_before>import requests
import simplejson as json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
<commit_msg>Add json module as fallback for simplejson<commit_after> | import requests
try:
import simplejson as json
except ImportError:
import json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
| import requests
import simplejson as json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
Add json module as fallback for simplejsonimport requests
try:
import simplejson as json
except ImportError:
import json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
| <commit_before>import requests
import simplejson as json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
<commit_msg>Add json module as fallback for simplejson<commit_after>import requests
try:
import simplejson as json
except ImportError:
import json
def getbaseurl(service='search', version='1', method='track',
format='json'):
"""Returns the base URL for a Spotify Web API query"""
baseurl = "http://ws.spotify.com/{0}/{1}/{2}.{3}"
return baseurl.format(service, version, method, format)
def search(terms, method='track'):
if hasattr(terms, '__iter__'):
sterms = ' '.join(terms)
else:
sterms = terms
base = getbaseurl(method=method)
r = requests.get(base, params={'q': sterms})
if r.status_code != requests.codes.ok:
raise NotImplementedException("There was some problem. Exception"
"not defined yet")
data = r.json()
return data
|
2ba8beb54b6de9fbe68501fa71a878da1426e6cd | tests/conftest.py | tests/conftest.py | import os
import pytest
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session | import os, sys
import pytest
""" So PYTHONPATH enviroment variable doesn't have to
be set for pytest to find mothership module. """
curdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(curdir,'..'))
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session | Allow tests to find mothership module | Allow tests to find mothership module
| Python | mit | afl-mothership/afl-mothership,afl-mothership/afl-mothership,afl-mothership/afl-mothership,afl-mothership/afl-mothership | import os
import pytest
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return sessionAllow tests to find mothership module | import os, sys
import pytest
""" So PYTHONPATH enviroment variable doesn't have to
be set for pytest to find mothership module. """
curdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(curdir,'..'))
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session | <commit_before>import os
import pytest
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session<commit_msg>Allow tests to find mothership module<commit_after> | import os, sys
import pytest
""" So PYTHONPATH enviroment variable doesn't have to
be set for pytest to find mothership module. """
curdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(curdir,'..'))
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session | import os
import pytest
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return sessionAllow tests to find mothership moduleimport os, sys
import pytest
""" So PYTHONPATH enviroment variable doesn't have to
be set for pytest to find mothership module. """
curdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(curdir,'..'))
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session | <commit_before>import os
import pytest
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session<commit_msg>Allow tests to find mothership module<commit_after>import os, sys
import pytest
""" So PYTHONPATH enviroment variable doesn't have to
be set for pytest to find mothership module. """
curdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(curdir,'..'))
from mothership import create_app, settings
from mothership import db as _db
@pytest.fixture(scope='session')
def app(request):
app = create_app('mothership.settings.TestConfig')
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture(scope='session')
def db(app, request):
"""Session-wide test database."""
if os.path.exists(settings.db_file.name):
os.unlink(settings.db_file.name)
_db.app = app
_db.create_all()
request.addfinalizer(_db.drop_all)
return _db
@pytest.fixture(scope='function')
def session(db, request):
"""Creates a new database session for a test."""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session = db.create_scoped_session(options=options)
db.session = session
def teardown():
transaction.rollback()
connection.close()
session.remove()
request.addfinalizer(teardown)
return session |
40ae333ab81ae1f4d93f3937306ddd12718b59a8 | virtool/processes.py | virtool/processes.py | import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "",
"install_hmms": ""
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
| import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "download",
"install_hmms": "download"
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
| Make download first step for install_software process type | Make download first step for install_software process type
| Python | mit | virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool | import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "",
"install_hmms": ""
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
Make download first step for install_software process type | import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "download",
"install_hmms": "download"
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
| <commit_before>import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "",
"install_hmms": ""
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
<commit_msg>Make download first step for install_software process type<commit_after> | import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "download",
"install_hmms": "download"
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
| import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "",
"install_hmms": ""
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
Make download first step for install_software process typeimport virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "download",
"install_hmms": "download"
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
| <commit_before>import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "",
"install_hmms": ""
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
<commit_msg>Make download first step for install_software process type<commit_after>import virtool.db.processes
FIRST_STEPS = {
"delete_reference": "delete_indexes",
"clone_reference": "copy_otus",
"import_reference": "load_file",
"remote_reference": "download",
"update_remote_reference": "download",
"update_software": "download",
"install_hmms": "download"
}
class ProgressTracker:
def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
self.db = db
self.process_id = process_id
self.total = total
self.factor = factor
self.increment = increment
self.initial = initial
self.count = 0
self.last_reported = 0
self.progress = self.initial
async def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
self.progress = self.initial + round(self.count / self.total * self.factor, 2)
if self.progress - self.last_reported >= self.increment:
await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
self.last_reported = self.progress
return self.progress
|
6ae84a6e098275cdaac8598695c97403dcb2092e | volttron/__init__.py | volttron/__init__.py | '''
Copyright (c) 2013, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
'''
'''
This material was prepared as an account of work sponsored by an
agency of the United States Government. Neither the United States
Government nor the United States Department of Energy, nor Battelle,
nor any of their employees, nor any jurisdiction or organization
that has cooperated in the development of these materials, makes
any warranty, express or implied, or assumes any legal liability
or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed,
or represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or
service by trade name, trademark, manufacturer, or otherwise does
not necessarily constitute or imply its endorsement, recommendation,
r favoring by the United States Government or any agency thereof,
or Battelle Memorial Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the
United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
'''
| # This is a namespace package; do not add anything else to this file.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| Make volttron a namespace package. | Make volttron a namespace package.
| Python | bsd-2-clause | schandrika/volttron,schandrika/volttron,schandrika/volttron,schandrika/volttron | '''
Copyright (c) 2013, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
'''
'''
This material was prepared as an account of work sponsored by an
agency of the United States Government. Neither the United States
Government nor the United States Department of Energy, nor Battelle,
nor any of their employees, nor any jurisdiction or organization
that has cooperated in the development of these materials, makes
any warranty, express or implied, or assumes any legal liability
or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed,
or represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or
service by trade name, trademark, manufacturer, or otherwise does
not necessarily constitute or imply its endorsement, recommendation,
r favoring by the United States Government or any agency thereof,
or Battelle Memorial Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the
United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
'''
Make volttron a namespace package. | # This is a namespace package; do not add anything else to this file.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before>'''
Copyright (c) 2013, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
'''
'''
This material was prepared as an account of work sponsored by an
agency of the United States Government. Neither the United States
Government nor the United States Department of Energy, nor Battelle,
nor any of their employees, nor any jurisdiction or organization
that has cooperated in the development of these materials, makes
any warranty, express or implied, or assumes any legal liability
or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed,
or represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or
service by trade name, trademark, manufacturer, or otherwise does
not necessarily constitute or imply its endorsement, recommendation,
r favoring by the United States Government or any agency thereof,
or Battelle Memorial Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the
United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
'''
<commit_msg>Make volttron a namespace package.<commit_after> | # This is a namespace package; do not add anything else to this file.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| '''
Copyright (c) 2013, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
'''
'''
This material was prepared as an account of work sponsored by an
agency of the United States Government. Neither the United States
Government nor the United States Department of Energy, nor Battelle,
nor any of their employees, nor any jurisdiction or organization
that has cooperated in the development of these materials, makes
any warranty, express or implied, or assumes any legal liability
or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed,
or represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or
service by trade name, trademark, manufacturer, or otherwise does
not necessarily constitute or imply its endorsement, recommendation,
r favoring by the United States Government or any agency thereof,
or Battelle Memorial Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the
United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
'''
Make volttron a namespace package.# This is a namespace package; do not add anything else to this file.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before>'''
Copyright (c) 2013, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
'''
'''
This material was prepared as an account of work sponsored by an
agency of the United States Government. Neither the United States
Government nor the United States Department of Energy, nor Battelle,
nor any of their employees, nor any jurisdiction or organization
that has cooperated in the development of these materials, makes
any warranty, express or implied, or assumes any legal liability
or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed,
or represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or
service by trade name, trademark, manufacturer, or otherwise does
not necessarily constitute or imply its endorsement, recommendation,
r favoring by the United States Government or any agency thereof,
or Battelle Memorial Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the
United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
'''
<commit_msg>Make volttron a namespace package.<commit_after># This is a namespace package; do not add anything else to this file.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
7c12d7f8e5fcd4cc328e109e0bdde9e62b4706f7 | parse_tweets.py | parse_tweets.py | # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
| # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
import sys
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
tweets = pd.DataFrame()
def getData(tweet, key):
try:
txt = tweet[key]
except:
return ''
return txt
tweets['text'] = map(lambda tweet: getData(tweet, 'text') if getData(tweet, \
'text') != None else '', tweets_data)
tweets['lang'] = map(lambda tweet: getData(tweet, 'lang') if getData(tweet,
'lang') != None else '', tweets_data)
tweets['country'] = map(lambda tweet: getData(tweet, 'place')['country'] if getData(tweet,
'place') != None else None, tweets_data)
#tweets_by_lang = tweets['lang'].value_counts()
#tweets_by_country = tweets['country'].value_counts()
#fig, ax = plt.subplots()
#ax.tick_params(axis = 'x', labelsize = 15)
#ax.tick_params(axis = 'y', labelsize = 10)
#ax.set_xlabel('Languages', fontsize = 15)
#ax.set_ylabel('No. of tweets', fontsize = 15)
#ax.set_title('Top five languages', fontsize = 15, fontweight = 'bold')
#tweets_by_lang[:5].plot(ax = ax, kind = 'bar', color = 'red')
#tweets_by_country[:5].plot(ax = ax, kind = 'bar', color = 'blue')
##plt.show()
def word_in_text(word, text):
if re.search(word.lower(), text.lower()):
return True
return False
tweets['python'] = \
tweets['text'].apply(lambda tweet: word_in_text('python', tweet))
tweets['javascript'] = \
tweets['text'].apply(lambda tweet: word_in_text('javascript', tweet))
tweets['ruby'] = \
tweets['text'].apply(lambda tweet: word_in_text('ruby', tweet))
print tweets['python'].value_counts()[True]
print tweets['javascript'].value_counts()[True]
print tweets['ruby'].value_counts()[True]
| Add next part of parser, and fix errors in earlier part arising from incomplete data items | Add next part of parser, and fix errors in earlier part arising from incomplete data items
| Python | mit | 0x7df/twitter2pocket | # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
Add next part of parser, and fix errors in earlier part arising from incomplete data items | # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
import sys
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
tweets = pd.DataFrame()
def getData(tweet, key):
try:
txt = tweet[key]
except:
return ''
return txt
tweets['text'] = map(lambda tweet: getData(tweet, 'text') if getData(tweet, \
'text') != None else '', tweets_data)
tweets['lang'] = map(lambda tweet: getData(tweet, 'lang') if getData(tweet,
'lang') != None else '', tweets_data)
tweets['country'] = map(lambda tweet: getData(tweet, 'place')['country'] if getData(tweet,
'place') != None else None, tweets_data)
#tweets_by_lang = tweets['lang'].value_counts()
#tweets_by_country = tweets['country'].value_counts()
#fig, ax = plt.subplots()
#ax.tick_params(axis = 'x', labelsize = 15)
#ax.tick_params(axis = 'y', labelsize = 10)
#ax.set_xlabel('Languages', fontsize = 15)
#ax.set_ylabel('No. of tweets', fontsize = 15)
#ax.set_title('Top five languages', fontsize = 15, fontweight = 'bold')
#tweets_by_lang[:5].plot(ax = ax, kind = 'bar', color = 'red')
#tweets_by_country[:5].plot(ax = ax, kind = 'bar', color = 'blue')
##plt.show()
def word_in_text(word, text):
if re.search(word.lower(), text.lower()):
return True
return False
tweets['python'] = \
tweets['text'].apply(lambda tweet: word_in_text('python', tweet))
tweets['javascript'] = \
tweets['text'].apply(lambda tweet: word_in_text('javascript', tweet))
tweets['ruby'] = \
tweets['text'].apply(lambda tweet: word_in_text('ruby', tweet))
print tweets['python'].value_counts()[True]
print tweets['javascript'].value_counts()[True]
print tweets['ruby'].value_counts()[True]
| <commit_before># Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
<commit_msg>Add next part of parser, and fix errors in earlier part arising from incomplete data items<commit_after> | # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
import sys
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
tweets = pd.DataFrame()
def getData(tweet, key):
try:
txt = tweet[key]
except:
return ''
return txt
tweets['text'] = map(lambda tweet: getData(tweet, 'text') if getData(tweet, \
'text') != None else '', tweets_data)
tweets['lang'] = map(lambda tweet: getData(tweet, 'lang') if getData(tweet,
'lang') != None else '', tweets_data)
tweets['country'] = map(lambda tweet: getData(tweet, 'place')['country'] if getData(tweet,
'place') != None else None, tweets_data)
#tweets_by_lang = tweets['lang'].value_counts()
#tweets_by_country = tweets['country'].value_counts()
#fig, ax = plt.subplots()
#ax.tick_params(axis = 'x', labelsize = 15)
#ax.tick_params(axis = 'y', labelsize = 10)
#ax.set_xlabel('Languages', fontsize = 15)
#ax.set_ylabel('No. of tweets', fontsize = 15)
#ax.set_title('Top five languages', fontsize = 15, fontweight = 'bold')
#tweets_by_lang[:5].plot(ax = ax, kind = 'bar', color = 'red')
#tweets_by_country[:5].plot(ax = ax, kind = 'bar', color = 'blue')
##plt.show()
def word_in_text(word, text):
if re.search(word.lower(), text.lower()):
return True
return False
tweets['python'] = \
tweets['text'].apply(lambda tweet: word_in_text('python', tweet))
tweets['javascript'] = \
tweets['text'].apply(lambda tweet: word_in_text('javascript', tweet))
tweets['ruby'] = \
tweets['text'].apply(lambda tweet: word_in_text('ruby', tweet))
print tweets['python'].value_counts()[True]
print tweets['javascript'].value_counts()[True]
print tweets['ruby'].value_counts()[True]
| # Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
Add next part of parser, and fix errors in earlier part arising from incomplete data items# Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
import sys
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
tweets = pd.DataFrame()
def getData(tweet, key):
try:
txt = tweet[key]
except:
return ''
return txt
tweets['text'] = map(lambda tweet: getData(tweet, 'text') if getData(tweet, \
'text') != None else '', tweets_data)
tweets['lang'] = map(lambda tweet: getData(tweet, 'lang') if getData(tweet,
'lang') != None else '', tweets_data)
tweets['country'] = map(lambda tweet: getData(tweet, 'place')['country'] if getData(tweet,
'place') != None else None, tweets_data)
#tweets_by_lang = tweets['lang'].value_counts()
#tweets_by_country = tweets['country'].value_counts()
#fig, ax = plt.subplots()
#ax.tick_params(axis = 'x', labelsize = 15)
#ax.tick_params(axis = 'y', labelsize = 10)
#ax.set_xlabel('Languages', fontsize = 15)
#ax.set_ylabel('No. of tweets', fontsize = 15)
#ax.set_title('Top five languages', fontsize = 15, fontweight = 'bold')
#tweets_by_lang[:5].plot(ax = ax, kind = 'bar', color = 'red')
#tweets_by_country[:5].plot(ax = ax, kind = 'bar', color = 'blue')
##plt.show()
def word_in_text(word, text):
if re.search(word.lower(), text.lower()):
return True
return False
tweets['python'] = \
tweets['text'].apply(lambda tweet: word_in_text('python', tweet))
tweets['javascript'] = \
tweets['text'].apply(lambda tweet: word_in_text('javascript', tweet))
tweets['ruby'] = \
tweets['text'].apply(lambda tweet: word_in_text('ruby', tweet))
print tweets['python'].value_counts()[True]
print tweets['javascript'].value_counts()[True]
print tweets['ruby'].value_counts()[True]
| <commit_before># Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
<commit_msg>Add next part of parser, and fix errors in earlier part arising from incomplete data items<commit_after># Requires pandas and matplotlib to be installed, e.g.
#
# `sudo apt-get install pandas-python`
import json
import re
import pandas as pd
import matplotlib.pyplot as plt
import sys
tweets_data_path = './twitter_data.txt'
tweets_data = []
tweets_file = open(tweets_data_path, "r")
for line in tweets_file:
try:
tweet = json.loads(line)
tweets_data.append(tweet)
except:
continue
print len(tweets_data)
tweets = pd.DataFrame()
def getData(tweet, key):
try:
txt = tweet[key]
except:
return ''
return txt
tweets['text'] = map(lambda tweet: getData(tweet, 'text') if getData(tweet, \
'text') != None else '', tweets_data)
tweets['lang'] = map(lambda tweet: getData(tweet, 'lang') if getData(tweet,
'lang') != None else '', tweets_data)
tweets['country'] = map(lambda tweet: getData(tweet, 'place')['country'] if getData(tweet,
'place') != None else None, tweets_data)
#tweets_by_lang = tweets['lang'].value_counts()
#tweets_by_country = tweets['country'].value_counts()
#fig, ax = plt.subplots()
#ax.tick_params(axis = 'x', labelsize = 15)
#ax.tick_params(axis = 'y', labelsize = 10)
#ax.set_xlabel('Languages', fontsize = 15)
#ax.set_ylabel('No. of tweets', fontsize = 15)
#ax.set_title('Top five languages', fontsize = 15, fontweight = 'bold')
#tweets_by_lang[:5].plot(ax = ax, kind = 'bar', color = 'red')
#tweets_by_country[:5].plot(ax = ax, kind = 'bar', color = 'blue')
##plt.show()
def word_in_text(word, text):
if re.search(word.lower(), text.lower()):
return True
return False
tweets['python'] = \
tweets['text'].apply(lambda tweet: word_in_text('python', tweet))
tweets['javascript'] = \
tweets['text'].apply(lambda tweet: word_in_text('javascript', tweet))
tweets['ruby'] = \
tweets['text'].apply(lambda tweet: word_in_text('ruby', tweet))
print tweets['python'].value_counts()[True]
print tweets['javascript'].value_counts()[True]
print tweets['ruby'].value_counts()[True]
|
9cfdb35fb1f645eda99d28085b093ee36dd14625 | processors/closure_compiler.py | processors/closure_compiler.py |
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
    """Asset processor that minifies JavaScript with Google Closure Compiler.

    Requires the CLOSURE_COMPILER_BINARY setting to point at the compiler
    jar; the jar is executed through the ``java`` binary found on PATH.
    """

    def modify_expected_output_filenames(self, filenames):
        # Minification does not rename files, so output names equal input names.
        return filenames

    def process(self, inputs):
        """Run each input through the Closure Compiler.

        ``inputs`` maps filename -> file-like object; returns an OrderedDict
        mapping the same filenames to StringIO objects holding the compiled
        output.  Raises ImproperlyConfigured when the setting is absent and
        ValueError when the compiler cannot be executed.
        """
        if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
            raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")

        from subprocess import Popen, PIPE
        outputs = OrderedDict()
        compressor = settings.CLOSURE_COMPILER_BINARY
        try:
            for filename, contents in inputs.items():
                # ECMASCRIPT5_STRICT keeps Closure from rejecting the
                # 'use strict' style code emitted by AngularJS apps.
                cmd = Popen([
                    'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
                    stdin=PIPE, stdout=PIPE, stderr=PIPE,
                    universal_newlines=True
                )
                # Source is fed on stdin.  NOTE(review): ``error`` (stderr)
                # is captured but never inspected, so compiler warnings and
                # failures are silently dropped — confirm this is intended.
                output, error = cmd.communicate(smart_str(contents.read()))
                file_out = StringIO.StringIO()
                file_out.write(output)
                file_out.seek(0)
                outputs[filename] = file_out
        except Exception, e:
            raise ValueError("Failed to execute Java VM or closure. "
                "Please make sure that you have installed Java "
                "and that it's in your PATH and that you've configured "
                "CLOSURE_COMPILER_BINARY in your settings correctly.\n"
                "Error was: %s" % e)

        return outputs
| Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies | Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies
| Python | bsd-2-clause | potatolondon/assetpipe |
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies |
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
| <commit_before>
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
<commit_msg>Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies<commit_after> |
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
| <commit_before>
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
<commit_msg>Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies<commit_after>
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
9452c8d4bebcb8466cda82d272c04bd9abe7f91d | tests/test_app.py | tests/test_app.py | import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
    """Unit test for the bare index view function."""

    def test_index_returns_greeting(self):
        # ``assertEquals`` is a deprecated alias; use ``assertEqual`` to
        # silence DeprecationWarning on modern Python.
        self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
    """Integration test that boots the Flask app in a live server process."""

    def create_app(self):
        # NOTE(review): this first assignment is immediately overwritten by
        # the next line, so the freshly created app is discarded — confirm
        # whether the line can simply be removed.
        app = server.Flask(server.__name__)
        app = server.app
        app.config['TESTING'] = True
        # Port 0 asks the OS for any free port.
        # NOTE(review): flask-testing documents this key as LIVE_SERVER_PORT —
        # verify the spelling used here actually takes effect.
        app.config['LIVE_SERVERPORT'] = 0
        return app

    def test_server_awake(self):
        # The live server must answer the root URL with HTTP 200.
        res = urllib.request.urlopen(self.get_server_url())
        self.assertEquals(res.code, 200)
if __name__ == '__main__':
unittest.main()
| import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEqual(res.code, 200)
if __name__ == '__main__':
unittest.main()
| Change assertEquals to assertEqual due to deprecation warnings | Change assertEquals to assertEqual due to deprecation warnings
| Python | mit | Laspimon/transact,Laspimon/transact | import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEquals(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEquals(res.code, 200)
if __name__ == '__main__':
unittest.main()
Change assertEquals to assertEqual due to deprecation warnings | import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEqual(res.code, 200)
if __name__ == '__main__':
unittest.main()
| <commit_before>import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEquals(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEquals(res.code, 200)
if __name__ == '__main__':
unittest.main()
<commit_msg>Change assertEquals to assertEqual due to deprecation warnings<commit_after> | import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEqual(res.code, 200)
if __name__ == '__main__':
unittest.main()
| import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEquals(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEquals(res.code, 200)
if __name__ == '__main__':
unittest.main()
Change assertEquals to assertEqual due to deprecation warningsimport flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEqual(res.code, 200)
if __name__ == '__main__':
unittest.main()
| <commit_before>import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEquals(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEquals(res.code, 200)
if __name__ == '__main__':
unittest.main()
<commit_msg>Change assertEquals to assertEqual due to deprecation warnings<commit_after>import flask_testing
import logging
import unittest
import urllib
import server
logging.disable(logging.CRITICAL)
class TestIndex(unittest.TestCase):
def test_index_returns_greeting(self):
self.assertEqual(server.index(), 'Hello World')
class TestLiveIndex(flask_testing.LiveServerTestCase):
def create_app(self):
app = server.Flask(server.__name__)
app = server.app
app.config['TESTING'] = True
app.config['LIVE_SERVERPORT'] = 0
return app
def test_server_awake(self):
res = urllib.request.urlopen(self.get_server_url())
self.assertEqual(res.code, 200)
if __name__ == '__main__':
unittest.main()
|
84342312ab663b1d7c9a9ac5e09811c2ed636fb4 | site_scons/utils.py | site_scons/utils.py | import os
import os.path
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
| import os
import os.path
import hashlib
from os.path import join as pjoin
def download_file(source, target):
    """Return the shell command that downloads *source* into *target* via wget."""
    return 'wget {0} -O {1}'.format(source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
    """Walk *base_path* and collect relative file paths.

    A file is returned when its base-relative path starts with one of the
    prefixes in *include_list* and with none of the prefixes in
    *exclude_list*.
    """
    if not isinstance(include_list, (list, tuple)):
        include_list = [include_list]
    if not exclude_list:
        exclude_list = []

    def _has_prefix(rel_path, prefixes):
        # True when rel_path begins with any of the given prefixes.
        for prefix in prefixes:
            if rel_path.find(prefix) == 0:
                return True
        return False

    collected = []
    for (dirpath, dirname, filenames) in os.walk(base_path):
        rel_dir = dirpath.replace('%s/' % (base_path), '')
        for name in filenames:
            rel_path = pjoin(rel_dir, name)
            if _has_prefix(rel_path, include_list) and not _has_prefix(rel_path, exclude_list):
                collected.append(rel_path)
    return collected
def file_sum(file_path, hash_type='md5'):
    """Return the hex digest of the file at *file_path*.

    *hash_type* must be 'md5' (default) or 'sha1'; any other value raises
    ValueError.
    """
    # BUG FIX: the original tested the *builtin* ``hash`` function instead of
    # the ``hash_type`` argument, so every call raised ValueError.
    if hash_type not in ('sha1', 'md5'):
        raise ValueError('Invalid hash type: %s' % (hash_type))
    # BUG FIX: the hash constructor must be *called* to obtain an object with
    # update()/hexdigest(); the original only looked it up.
    file_hash = getattr(hashlib, hash_type)()
    with open(file_path, 'rb') as fp:
        file_hash.update(fp.read())
    return file_hash.hexdigest()
def get_tar_bin_path(where_is_func, possible_names=None):
    """Locate a tar binary using *where_is_func* (e.g. SCons' WhereIs).

    Tries each candidate name in order and returns the first path the
    lookup function reports, or None when no candidate is found.
    """
    candidates = possible_names if possible_names else ['gnutar', 'gtar', 'tar']
    for candidate in candidates:
        found = where_is_func(candidate)
        if found:
            return found
    return None
| Add some more utility functions. | Add some more utility functions.
| Python | apache-2.0 | cloudkick/cast,cloudkick/cast,cloudkick/cast,cloudkick/cast | import os
import os.path
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
Add some more utility functions. | import os
import os.path
import hashlib
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
def file_sum(file_path, hash_type='md5'):
if hash not in [ 'sha1', 'md5' ]:
raise ValueError('Invalid hash type: %s' % (hash_type))
file_hash = getattr(hashlib, hash_type, None)
with open(file_path, 'rb') as fp:
content = fp.read()
file_hash.update(content)
return file_hash.hexdigest()
def get_tar_bin_path(where_is_func, possible_names=None):
if not possible_names:
possible_names = [ 'gnutar', 'gtar', 'tar' ]
for binary in possible_names:
binary_path = where_is_func(binary)
if binary_path:
return binary_path
return None
| <commit_before>import os
import os.path
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
<commit_msg>Add some more utility functions.<commit_after> | import os
import os.path
import hashlib
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
def file_sum(file_path, hash_type='md5'):
if hash not in [ 'sha1', 'md5' ]:
raise ValueError('Invalid hash type: %s' % (hash_type))
file_hash = getattr(hashlib, hash_type, None)
with open(file_path, 'rb') as fp:
content = fp.read()
file_hash.update(content)
return file_hash.hexdigest()
def get_tar_bin_path(where_is_func, possible_names=None):
if not possible_names:
possible_names = [ 'gnutar', 'gtar', 'tar' ]
for binary in possible_names:
binary_path = where_is_func(binary)
if binary_path:
return binary_path
return None
| import os
import os.path
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
Add some more utility functions.import os
import os.path
import hashlib
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
def file_sum(file_path, hash_type='md5'):
if hash not in [ 'sha1', 'md5' ]:
raise ValueError('Invalid hash type: %s' % (hash_type))
file_hash = getattr(hashlib, hash_type, None)
with open(file_path, 'rb') as fp:
content = fp.read()
file_hash.update(content)
return file_hash.hexdigest()
def get_tar_bin_path(where_is_func, possible_names=None):
if not possible_names:
possible_names = [ 'gnutar', 'gtar', 'tar' ]
for binary in possible_names:
binary_path = where_is_func(binary)
if binary_path:
return binary_path
return None
| <commit_before>import os
import os.path
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
<commit_msg>Add some more utility functions.<commit_after>import os
import os.path
import hashlib
from os.path import join as pjoin
def download_file(source, target):
return 'wget %s -O %s' % (source, target)
def get_file_list(base_path, include_list = None, exclude_list = None):
if not isinstance(include_list, (list, tuple)):
include_list = [ include_list ]
if not exclude_list:
exclude_list = []
def is_included(file_path):
for path in include_list:
if file_path.find(path) == 0:
return True
return False
def is_excluded(file_path):
for path in exclude_list:
if file_path.find(path) == 0:
return True
return False
files = []
for (dirpath, dirname, filenames) in os.walk(base_path):
for file_name in filenames:
file_path = pjoin(dirpath.replace('%s/' % (base_path), ''), file_name)
if is_included(file_path) and not is_excluded(file_path):
files.append(file_path)
return files
def file_sum(file_path, hash_type='md5'):
if hash not in [ 'sha1', 'md5' ]:
raise ValueError('Invalid hash type: %s' % (hash_type))
file_hash = getattr(hashlib, hash_type, None)
with open(file_path, 'rb') as fp:
content = fp.read()
file_hash.update(content)
return file_hash.hexdigest()
def get_tar_bin_path(where_is_func, possible_names=None):
if not possible_names:
possible_names = [ 'gnutar', 'gtar', 'tar' ]
for binary in possible_names:
binary_path = where_is_func(binary)
if binary_path:
return binary_path
return None
|
38d2aceecf485e59af4e66be711d7d0f12086c06 | twinsies/clock.py | twinsies/clock.py | from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
del fetched_tweets
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
| from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
| Remove del fetched_tweets (now a generator) | Remove del fetched_tweets (now a generator)
| Python | mit | kkwteh/twinyewest | from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
del fetched_tweets
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
Remove del fetched_tweets (now a generator) | from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
| <commit_before>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
del fetched_tweets
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
<commit_msg>Remove del fetched_tweets (now a generator)<commit_after> | from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
| from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
del fetched_tweets
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
Remove del fetched_tweets (now a generator)from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
| <commit_before>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
del fetched_tweets
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
<commit_msg>Remove del fetched_tweets (now a generator)<commit_after>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
|
caa96562fb65dfdedc37f6efc463701e8b22d410 | zipview/views.py | zipview/views.py | import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.utils.six import b
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b(""), name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
| import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b"", name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
| Remove obsolete python2 unicode helpers | Remove obsolete python2 unicode helpers
| Python | mit | thibault/django-zipview | import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.utils.six import b
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b(""), name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
Remove obsolete python2 unicode helpers | import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b"", name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
| <commit_before>import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.utils.six import b
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b(""), name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
<commit_msg>Remove obsolete python2 unicode helpers<commit_after> | import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b"", name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
| import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.utils.six import b
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b(""), name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
Remove obsolete python2 unicode helpersimport zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b"", name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
| <commit_before>import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.utils.six import b
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b(""), name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
<commit_msg>Remove obsolete python2 unicode helpers<commit_after>import zipfile
from django.views.generic import View
from django.http import HttpResponse
from django.core.files.base import ContentFile
class BaseZipView(View):
"""A base view to zip and stream several files."""
http_method_names = ['get']
zipfile_name = 'download.zip'
def get_files(self):
"""Must return a list of django's `File` objects."""
raise NotImplementedError()
def get_archive_name(self, request):
return self.zipfile_name
def get(self, request, *args, **kwargs):
temp_file = ContentFile(b"", name=self.zipfile_name)
with zipfile.ZipFile(temp_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
files = self.get_files()
for file_ in files:
path = file_.name
zip_file.writestr(path, file_.read())
file_size = temp_file.tell()
temp_file.seek(0)
response = HttpResponse(temp_file, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=%s' % self.get_archive_name(request)
response['Content-Length'] = file_size
return response
|
1e006b5df70303b743cb2fd7d6c2a5ef4234f70b | zazo/__init__.py | zazo/__init__.py | """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.1.0.dev0"
| """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.0.0a2"
| Switch to an alpha version | Switch to an alpha version
| Python | mit | pradyunsg/zazo,pradyunsg/zazo | """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.1.0.dev0"
Switch to an alpha version | """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.0.0a2"
| <commit_before>"""An Extensible Dependency Resolver written in Python
"""
__version__ = "0.1.0.dev0"
<commit_msg>Switch to an alpha version<commit_after> | """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.0.0a2"
| """An Extensible Dependency Resolver written in Python
"""
__version__ = "0.1.0.dev0"
Switch to an alpha version"""An Extensible Dependency Resolver written in Python
"""
__version__ = "0.0.0a2"
| <commit_before>"""An Extensible Dependency Resolver written in Python
"""
__version__ = "0.1.0.dev0"
<commit_msg>Switch to an alpha version<commit_after>"""An Extensible Dependency Resolver written in Python
"""
__version__ = "0.0.0a2"
|
fd819ff0ff1a7d73dd58f152d2c4be8aea18e2d3 | rebulk/processors.py | rebulk/processors.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
| Fix issue when a private match is found multiple times | Fix issue when a private match is found multiple times
| Python | mit | Toilal/rebulk | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
Fix issue when a private match is found multiple times | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
<commit_msg>Fix issue when a private match is found multiple times<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
Fix issue when a private match is found multiple times#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
<commit_msg>Fix issue when a private match is found multiple times<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Processor functions
"""
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
|
7366e84afdc93b68278b64bc9ddfac08901cb032 | python/peacock/tests/postprocessor_tab/gold/TestPostprocessorPluginManager_test_script.py | python/peacock/tests/postprocessor_tab/gold/TestPostprocessorPluginManager_test_script.py | #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| Remove header from gold script file | Remove header from gold script file
| Python | lgpl-2.1 | milljm/moose,YaqiWang/moose,nuclear-wizard/moose,bwspenc/moose,sapitts/moose,idaholab/moose,idaholab/moose,jessecarterMOOSE/moose,permcody/moose,nuclear-wizard/moose,milljm/moose,lindsayad/moose,laagesen/moose,idaholab/moose,andrsd/moose,milljm/moose,SudiptaBiswas/moose,dschwen/moose,permcody/moose,lindsayad/moose,sapitts/moose,YaqiWang/moose,bwspenc/moose,milljm/moose,nuclear-wizard/moose,andrsd/moose,bwspenc/moose,harterj/moose,lindsayad/moose,harterj/moose,lindsayad/moose,harterj/moose,SudiptaBiswas/moose,permcody/moose,jessecarterMOOSE/moose,harterj/moose,laagesen/moose,bwspenc/moose,laagesen/moose,milljm/moose,dschwen/moose,laagesen/moose,jessecarterMOOSE/moose,harterj/moose,andrsd/moose,jessecarterMOOSE/moose,SudiptaBiswas/moose,laagesen/moose,SudiptaBiswas/moose,jessecarterMOOSE/moose,andrsd/moose,sapitts/moose,andrsd/moose,permcody/moose,idaholab/moose,nuclear-wizard/moose,dschwen/moose,idaholab/moose,bwspenc/moose,YaqiWang/moose,lindsayad/moose,dschwen/moose,SudiptaBiswas/moose,sapitts/moose,dschwen/moose,YaqiWang/moose,sapitts/moose | #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
Remove header from gold script file | """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| <commit_before>#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
<commit_msg>Remove header from gold script file<commit_after> | """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
Remove header from gold script file"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| <commit_before>#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
<commit_msg>Remove header from gold script file<commit_after>"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.94])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
|
ae5b93c4e12f732a8c56de80b39f227c90ef4809 | polls/models.py | polls/models.py | from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
| from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
ordering = ('-published_at',)
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
| Order questions by published date | Order questions by published date
Closes #23
| Python | mit | apiaryio/polls-api | from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
Order questions by published date
Closes #23 | from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
ordering = ('-published_at',)
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
| <commit_before>from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
<commit_msg>Order questions by published date
Closes #23<commit_after> | from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
ordering = ('-published_at',)
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
| from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
Order questions by published date
Closes #23from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
ordering = ('-published_at',)
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
| <commit_before>from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
<commit_msg>Order questions by published date
Closes #23<commit_after>from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=140)
published_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'published_at'
ordering = ('-published_at',)
def __str__(self):
return self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question, related_name='choices')
choice_text = models.CharField(max_length=140)
def __str__(self):
return self.choice_text
def vote(self):
"""
Create a vote on this choice.
"""
return Vote.objects.create(choice=self)
class Vote(models.Model):
choice = models.ForeignKey(Choice, related_name='votes')
|
10e3c7b8dbc4befa2533de1a07f1f7827b961f81 | rejected/__init__.py | rejected/__init__.py | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
| """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
| Fix noqa location, bump version | Fix noqa location, bump version
| Python | bsd-3-clause | gmr/rejected,gmr/rejected | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
Fix noqa location, bump version | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
| <commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
<commit_msg>Fix noqa location, bump version<commit_after> | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
| """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
Fix noqa location, bump version"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
| <commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
<commit_msg>Fix noqa location, bump version<commit_after>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
|
c3d20f0f394063abe7e149d9de41aa3ac0ab91f1 | knowit/rules/alternative.py | knowit/rules/alternative.py |
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
|
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
| Fix multiple spaces after ',' | Fix multiple spaces after ','
| Python | mit | ratoaq2/knowit |
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
Fix multiple spaces after ',' |
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
| <commit_before>
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
<commit_msg>Fix multiple spaces after ','<commit_after> |
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
|
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
Fix multiple spaces after ','
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
| <commit_before>
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
<commit_msg>Fix multiple spaces after ','<commit_after>
from knowit.rule import Rule
class AlternativeRule(Rule):
"""Alternative rule."""
def __init__(self, name, prop_name: str, **kwargs):
super().__init__(name, **kwargs)
self.prop_name = prop_name
def execute(self, props, pv_props, context):
"""Execute the rule against properties."""
if f'_{self.prop_name}' in pv_props and self.prop_name not in props:
return pv_props.get(f'_{self.prop_name}')
|
c9491f47e1fc98e0a6aadf9bf379f21112768332 | platformio/builder/scripts/windows_x86.py | platformio/builder/scripts/windows_x86.py | # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
SIZEPRINTCMD="size $SOURCES",
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
AR="i586-mingw32-ar",
AS="i586-mingw32-as",
CC="i586-mingw32-gcc",
CXX="i586-mingw32-g++",
OBJCOPY="i586-mingw32-objcopy",
RANLIB="i586-mingw32-ranlib",
SIZETOOL="i586-mingw32-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES'
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
| # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
AR="$_MINGWPREFIX-ar",
AS="$_MINGWPREFIX-as",
CC="$_MINGWPREFIX-gcc",
CXX="$_MINGWPREFIX-g++",
OBJCOPY="$_MINGWPREFIX-objcopy",
RANLIB="$_MINGWPREFIX-ranlib",
SIZETOOL="$_MINGWPREFIX-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES',
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
_MINGWPREFIX="i586-mingw32"
)
elif get_systype() in ("linux_x86_64", "linux_i686"):
env.Replace(
_MINGWPREFIX="i686-w64-mingw32"
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
| Add support for mingw-linux toolchains | Add support for mingw-linux toolchains
| Python | apache-2.0 | ZachMassia/platformio,platformio/platformio-core,mseroczynski/platformio,eiginn/platformio,valeros/platformio,mcanthony/platformio,platformio/platformio,dkuku/platformio,platformio/platformio-core,atyenoria/platformio,mplewis/platformio | # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
SIZEPRINTCMD="size $SOURCES",
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
AR="i586-mingw32-ar",
AS="i586-mingw32-as",
CC="i586-mingw32-gcc",
CXX="i586-mingw32-g++",
OBJCOPY="i586-mingw32-objcopy",
RANLIB="i586-mingw32-ranlib",
SIZETOOL="i586-mingw32-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES'
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
Add support for mingw-linux toolchains | # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
AR="$_MINGWPREFIX-ar",
AS="$_MINGWPREFIX-as",
CC="$_MINGWPREFIX-gcc",
CXX="$_MINGWPREFIX-g++",
OBJCOPY="$_MINGWPREFIX-objcopy",
RANLIB="$_MINGWPREFIX-ranlib",
SIZETOOL="$_MINGWPREFIX-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES',
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
_MINGWPREFIX="i586-mingw32"
)
elif get_systype() in ("linux_x86_64", "linux_i686"):
env.Replace(
_MINGWPREFIX="i686-w64-mingw32"
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
| <commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
SIZEPRINTCMD="size $SOURCES",
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
AR="i586-mingw32-ar",
AS="i586-mingw32-as",
CC="i586-mingw32-gcc",
CXX="i586-mingw32-g++",
OBJCOPY="i586-mingw32-objcopy",
RANLIB="i586-mingw32-ranlib",
SIZETOOL="i586-mingw32-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES'
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
<commit_msg>Add support for mingw-linux toolchains<commit_after> | # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
AR="$_MINGWPREFIX-ar",
AS="$_MINGWPREFIX-as",
CC="$_MINGWPREFIX-gcc",
CXX="$_MINGWPREFIX-g++",
OBJCOPY="$_MINGWPREFIX-objcopy",
RANLIB="$_MINGWPREFIX-ranlib",
SIZETOOL="$_MINGWPREFIX-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES',
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
_MINGWPREFIX="i586-mingw32"
)
elif get_systype() in ("linux_x86_64", "linux_i686"):
env.Replace(
_MINGWPREFIX="i686-w64-mingw32"
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
| # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
SIZEPRINTCMD="size $SOURCES",
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
AR="i586-mingw32-ar",
AS="i586-mingw32-as",
CC="i586-mingw32-gcc",
CXX="i586-mingw32-g++",
OBJCOPY="i586-mingw32-objcopy",
RANLIB="i586-mingw32-ranlib",
SIZETOOL="i586-mingw32-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES'
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
Add support for mingw-linux toolchains# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
AR="$_MINGWPREFIX-ar",
AS="$_MINGWPREFIX-as",
CC="$_MINGWPREFIX-gcc",
CXX="$_MINGWPREFIX-g++",
OBJCOPY="$_MINGWPREFIX-objcopy",
RANLIB="$_MINGWPREFIX-ranlib",
SIZETOOL="$_MINGWPREFIX-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES',
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
_MINGWPREFIX="i586-mingw32"
)
elif get_systype() in ("linux_x86_64", "linux_i686"):
env.Replace(
_MINGWPREFIX="i686-w64-mingw32"
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
| <commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
SIZEPRINTCMD="size $SOURCES",
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
AR="i586-mingw32-ar",
AS="i586-mingw32-as",
CC="i586-mingw32-gcc",
CXX="i586-mingw32-g++",
OBJCOPY="i586-mingw32-objcopy",
RANLIB="i586-mingw32-ranlib",
SIZETOOL="i586-mingw32-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES'
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
<commit_msg>Add support for mingw-linux toolchains<commit_after># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
"""
Builder for Windows x86
"""
from SCons.Script import AlwaysBuild, Default, DefaultEnvironment
from platformio.util import get_systype
env = DefaultEnvironment()
env.Replace(
AR="$_MINGWPREFIX-ar",
AS="$_MINGWPREFIX-as",
CC="$_MINGWPREFIX-gcc",
CXX="$_MINGWPREFIX-g++",
OBJCOPY="$_MINGWPREFIX-objcopy",
RANLIB="$_MINGWPREFIX-ranlib",
SIZETOOL="$_MINGWPREFIX-size",
SIZEPRINTCMD='"$SIZETOOL" $SOURCES',
PROGSUFFIX=".exe"
)
if get_systype() == "darwin_x86_64":
env.Replace(
_MINGWPREFIX="i586-mingw32"
)
elif get_systype() in ("linux_x86_64", "linux_i686"):
env.Replace(
_MINGWPREFIX="i686-w64-mingw32"
)
#
# Target: Build executable program
#
target_bin = env.BuildProgram()
#
# Target: Print binary size
#
target_size = env.Alias("size", target_bin, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Define targets
#
Default([target_bin])
|
e4ae1bae94eb2afd5ea851d4ce6528f81c46c32d | pebble/PblCommand.py | pebble/PblCommand.py | import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd> | Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd>
| Python | mit | pebble/libpebble,pebble/libpebble,pebble/libpebble,pebble/libpebble | import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd> | import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| <commit_before>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
<commit_msg>Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd><commit_after> | import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| <commit_before>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
<commit_msg>Allow user to enter pebble <cmd> --debug as well as pebble --debug <cmd><commit_after>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
a17933c7806634391137244e2c17327898187146 | djstripe/__init__.py | djstripe/__init__.py | """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
| """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
| Set dj-stripe as stripe app info | Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo
| Python | mit | pydanny/dj-stripe,pydanny/dj-stripe,jleclanche/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe,jleclanche/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe | """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo | """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
| <commit_before>"""
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
<commit_msg>Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo<commit_after> | """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
| """
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo"""
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
| <commit_before>"""
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
<commit_msg>Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo<commit_after>"""
.. module:: djstripe.
:synopsis: dj-stripe - Django + Stripe Made Easy
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
|
26d2e13945f4780ff74dfe99695be7045fb9ed39 | piper/prop.py | piper/prop.py | import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
| import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
elif isinstance(v, list):
# Make lists have keys like 'foo.bar.x'
for x, item in enumerate(v):
key = '{2}{0}{1}'.format(sep, x, new_key)
items.append((key, item))
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
| Add PropBase.flatten() support for flattening lists | Add PropBase.flatten() support for flattening lists
| Python | mit | thiderman/piper | import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
Add PropBase.flatten() support for flattening lists | import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
elif isinstance(v, list):
# Make lists have keys like 'foo.bar.x'
for x, item in enumerate(v):
key = '{2}{0}{1}'.format(sep, x, new_key)
items.append((key, item))
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
| <commit_before>import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
<commit_msg>Add PropBase.flatten() support for flattening lists<commit_after> | import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
elif isinstance(v, list):
# Make lists have keys like 'foo.bar.x'
for x, item in enumerate(v):
key = '{2}{0}{1}'.format(sep, x, new_key)
items.append((key, item))
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
| import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
Add PropBase.flatten() support for flattening listsimport facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
elif isinstance(v, list):
# Make lists have keys like 'foo.bar.x'
for x, item in enumerate(v):
key = '{2}{0}{1}'.format(sep, x, new_key)
items.append((key, item))
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
| <commit_before>import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
<commit_msg>Add PropBase.flatten() support for flattening lists<commit_after>import facter
from collections import MutableMapping
from piper.abc import DynamicItem
class PropBase(DynamicItem):
def __init__(self):
super(PropBase, self).__init__(None)
self._props = None
@property
def properties(self):
"""
Collect system properties and return a dictionary of them
"""
raise NotImplementedError()
@property
def namespace(self):
return '.'.join((self.__module__, self.__class__.__name__))
# http://stackoverflow.com/questions/6027558
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self.flatten(v, new_key).items())
elif isinstance(v, list):
# Make lists have keys like 'foo.bar.x'
for x, item in enumerate(v):
key = '{2}{0}{1}'.format(sep, x, new_key)
items.append((key, item))
else:
items.append((new_key, v))
return dict(items)
class FacterProp(PropBase):
"""
Collect properties from facter via facterpy
It should be noted that the current version does not have any typecasting,
so everything is always strings.
See https://github.com/knorby/facterpy/issues/5
"""
@property
def properties(self):
if self._props is None:
facts = facter.Facter().all
self._props = self.flatten(facts)
return self._props
|
6c7ca64fbd93ab52dfc1ba792fd314395483d651 | piazza_api/piazza.py | piazza_api/piazza.py | class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
| from .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
| Add login methods to Piazza | feat(user): Add login methods to Piazza
| Python | mit | hfaran/piazza-api,kwangkim/piazza-api | class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
feat(user): Add login methods to Piazza | from .rpc import PiazzaRPC
class Piazza(object):
    """Unofficial client wrapper over Piazza's internal API."""

    def __init__(self):
        # RPC session handle; populated by one of the login methods.
        self._rpc_api = None

    def user_login(self, email=None, password=None):
        """Authenticate with an email/password pair.

        On success a logged-in session cookie is held by the underlying
        RPC client stored on this instance.

        :type email: str
        :param email: account email address
        :type password: str
        :param password: account password
        """
        self._rpc_api = PiazzaRPC()
        api = self._rpc_api
        api.user_login(email=email, password=password)

    def demo_login(self, auth=None, url=None):
        """Authenticate as a demo user via a "Share Your Class" URL.

        Either the full ``url`` or just the ``auth`` token may be given.

        :param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
        :param auth: Example - "06c111b"
        """
        self._rpc_api = PiazzaRPC()
        api = self._rpc_api
        api.demo_login(auth=auth, url=url)

    def network(self, network_id):
        """Return a Network instance for ``network_id``.

        The network (class) ID can be read off your class page URL:
        https://piazza.com/class/{network_id}

        :type network_id: str
        :param network_id: ID of the network (class) to query posts from.
        """
        return None
| <commit_before>class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
<commit_msg>feat(user): Add login methods to Piazza<commit_after> | from .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
| class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
feat(user): Add login methods to Piazzafrom .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
| <commit_before>class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
<commit_msg>feat(user): Add login methods to Piazza<commit_after>from .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.